Posted to commits@hbase.apache.org by gi...@apache.org on 2020/06/18 14:47:41 UTC

[hbase-site] branch asf-site updated: Published site at bd79c4065ccb13a5e217d844376b3e7b9489d2fe.

This is an automated email from the ASF dual-hosted git repository.

git-site-role pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/hbase-site.git


The following commit(s) were added to refs/heads/asf-site by this push:
     new 97e5813  Published site at bd79c4065ccb13a5e217d844376b3e7b9489d2fe.
97e5813 is described below

commit 97e5813d812f12bab73d1c3e47d758f9f4af8ca9
Author: jenkins <bu...@apache.org>
AuthorDate: Thu Jun 18 14:47:08 2020 +0000

    Published site at bd79c4065ccb13a5e217d844376b3e7b9489d2fe.
---
 acid-semantics.html                                |    2 +-
 apache_hbase_reference_guide.pdf                   |    4 +-
 book.html                                          |    2 +-
 bulk-loads.html                                    |    2 +-
 checkstyle-aggregate.html                          |   26 +-
 coc.html                                           |    2 +-
 dependencies.html                                  |    2 +-
 dependency-convergence.html                        |    2 +-
 dependency-info.html                               |    2 +-
 dependency-management.html                         |    2 +-
 devapidocs/constant-values.html                    |   46 +-
 devapidocs/index-all.html                          |   36 +-
 .../class-use/SplitLogWorkerCoordination.html      |    6 +-
 .../regionserver/class-use/LastSequenceId.html     |    2 +-
 .../class-use/RegionServerServices.html            |    2 +-
 .../HBaseSaslRpcClient.WrappedInputStream.html     |   14 +-
 .../HBaseSaslRpcClient.WrappedOutputStream.html    |    6 +-
 .../hadoop/hbase/security/HBaseSaslRpcClient.html  |    8 +-
 .../util/class-use/CancelableProgressable.html     |    4 +-
 .../wal/BoundedRecoveredHFilesOutputSink.html      |   81 +-
 .../hadoop/hbase/wal/OutputSink.WriterThread.html  |   20 +-
 .../org/apache/hadoop/hbase/wal/OutputSink.html    |   33 +-
 .../hadoop/hbase/wal/RecoveredEditsOutputSink.html |   31 +-
 .../wal/WALSplitter.CorruptedLogFileException.html |    8 +-
 .../hbase/wal/WALSplitter.PipelineController.html  |   12 +-
 .../org/apache/hadoop/hbase/wal/WALSplitter.html   |  190 +--
 .../hadoop/hbase/wal/class-use/OutputSink.html     |    4 +-
 .../hadoop/hbase/wal/class-use/WALFactory.html     |    6 +-
 .../apache/hadoop/hbase/wal/package-summary.html   |    8 +-
 .../org/apache/hadoop/hbase/wal/package-use.html   |    4 +-
 .../HBaseSaslRpcClient.WrappedInputStream.html     |  278 ++---
 .../HBaseSaslRpcClient.WrappedOutputStream.html    |  278 ++---
 .../hadoop/hbase/security/HBaseSaslRpcClient.html  |  278 ++---
 .../wal/BoundedRecoveredHFilesOutputSink.html      |  413 +++----
 .../hadoop/hbase/wal/OutputSink.WriterThread.html  |  325 ++---
 .../org/apache/hadoop/hbase/wal/OutputSink.html    |  325 ++---
 .../hadoop/hbase/wal/RecoveredEditsOutputSink.html |  258 ++--
 .../wal/WALSplitter.CorruptedLogFileException.html | 1046 ++++++++--------
 .../hbase/wal/WALSplitter.PipelineController.html  | 1046 ++++++++--------
 .../org/apache/hadoop/hbase/wal/WALSplitter.html   | 1046 ++++++++--------
 downloads.html                                     |    2 +-
 export_control.html                                |    2 +-
 index.html                                         |    2 +-
 issue-tracking.html                                |    2 +-
 mail-lists.html                                    |    2 +-
 metrics.html                                       |    2 +-
 old_news.html                                      |    2 +-
 plugin-management.html                             |    2 +-
 plugins.html                                       |    2 +-
 poweredbyhbase.html                                |    2 +-
 project-info.html                                  |    2 +-
 project-reports.html                               |    2 +-
 project-summary.html                               |    2 +-
 pseudo-distributed.html                            |    2 +-
 replication.html                                   |    2 +-
 resources.html                                     |    2 +-
 source-repository.html                             |    2 +-
 sponsors.html                                      |    2 +-
 supportingprojects.html                            |    2 +-
 team-list.html                                     |    2 +-
 testdevapidocs/index-all.html                      |    2 +
 .../apache/hadoop/hbase/backup/package-tree.html   |    2 +-
 .../apache/hadoop/hbase/io/hfile/package-tree.html |    2 +-
 .../org/apache/hadoop/hbase/package-tree.html      |   12 +-
 .../hadoop/hbase/procedure2/package-tree.html      |    4 +-
 .../hadoop/hbase/regionserver/package-tree.html    |    4 +-
 .../hbase/security/TestHBaseSaslRpcClient.html     |   72 +-
 .../org/apache/hadoop/hbase/test/package-tree.html |    2 +-
 .../hadoop/hbase/wal/TestWALSplitToHFile.html      |   76 +-
 .../hbase/security/TestHBaseSaslRpcClient.html     |  561 ++++-----
 .../hadoop/hbase/wal/TestWALSplitToHFile.html      | 1248 ++++++++++----------
 71 files changed, 4033 insertions(+), 3852 deletions(-)

diff --git a/acid-semantics.html b/acid-semantics.html
index b6a6d98..40737d8 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -467,7 +467,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 2c086b0..2dd48f1 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.rc.2, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20200617143137+00'00')
-/CreationDate (D:20200617144300+00'00')
+/ModDate (D:20200618143158+00'00')
+/CreationDate (D:20200618144408+00'00')
 >>
 endobj
 2 0 obj
diff --git a/book.html b/book.html
index 17e8c86..7677244 100644
--- a/book.html
+++ b/book.html
@@ -45927,7 +45927,7 @@ org/apache/hadoop/hbase/security/access/AccessControlClient.revoke:(Lorg/apache/
 <div id="footer">
 <div id="footer-text">
 Version 3.0.0-SNAPSHOT<br>
-Last updated 2020-06-17 14:31:37 UTC
+Last updated 2020-06-18 14:31:58 UTC
 </div>
 </div>
 <script type="text/x-mathjax-config">
diff --git a/bulk-loads.html b/bulk-loads.html
index ddcb88d..0744848 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -172,7 +172,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 83806fd..75ba17f 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7339,7 +7339,7 @@
 <tr class="a">
 <td>annotation</td>
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_annotation.html#MissingDeprecated">MissingDeprecated</a></td>
-<td>8</td>
+<td>9</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="b">
 <td>blocks</td>
@@ -7443,7 +7443,7 @@
 <tr class="b">
 <td></td>
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription">NonEmptyAtclauseDescription</a></td>
-<td>2433</td>
+<td>2432</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="a">
 <td>misc</td>
@@ -7461,12 +7461,12 @@
 <ul>
 <li>max: <tt>&quot;100&quot;</tt></li>
 <li>ignorePattern: <tt>&quot;^package.*|^import.*|a href|href|http://|https://|ftp://|org.apache.thrift.|com.google.protobuf.|hbase.protobuf.generated&quot;</tt></li></ul></td>
-<td>933</td>
+<td>932</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="b">
 <td></td>
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_sizes.html#MethodLength">MethodLength</a></td>
-<td>54</td>
+<td>55</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="a">
 <td>whitespace</td>
@@ -11777,8 +11777,8 @@
 <td>251</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
-<td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
+<td>annotation</td>
+<td>MissingDeprecated</td>
 <td>Javadoc comment at column 47 has parse error. Details: no viable alternative at input '&lt;qualifier,' while parsing HTML_ELEMENT</td>
 <td>387</td></tr>
 <tr class="b">
@@ -64658,19 +64658,19 @@
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>192</td></tr>
+<td>202</td></tr>
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>misc</td>
 <td>ArrayTypeStyle</td>
 <td>Array brackets at illegal position.</td>
-<td>219</td></tr>
+<td>229</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
 <td>NonEmptyAtclauseDescription</td>
 <td>At-clause should have a non-empty description.</td>
-<td>255</td></tr></table></div>
+<td>265</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.security.HadoopSecurityEnabledUserProviderForTesting.java">org/apache/hadoop/hbase/security/HadoopSecurityEnabledUserProviderForTesting.java</h3>
 <table border="0" class="table table-striped">
@@ -78123,9 +78123,9 @@
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>sizes</td>
-<td>LineLength</td>
-<td>Line is longer than 100 characters (found 102).</td>
-<td>289</td></tr></table></div>
+<td>MethodLength</td>
+<td>Method length is 151 lines (max allowed is 150).</td>
+<td>255</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.zookeeper.ReadOnlyZKClient.java">org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.java</h3>
 <table border="0" class="table table-striped">
@@ -78323,7 +78323,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/coc.html b/coc.html
index 3031053..8d7e389 100644
--- a/coc.html
+++ b/coc.html
@@ -241,7 +241,7 @@ email to <a class="externalLink" href="mailto:private@hbase.apache.org">the priv
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependencies.html b/dependencies.html
index e9561c9..1b24c88 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -313,7 +313,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 47c3b0a..2665009 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -788,7 +788,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-info.html b/dependency-info.html
index 2c2ca64..e145ad9 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -194,7 +194,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-management.html b/dependency-management.html
index d66ff6f..02c7542 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -1096,7 +1096,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 447e91c..ecdfed9 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -32177,32 +32177,6 @@
 </li>
 <li class="blockList">
 <table class="constantsSummary" border="0" cellpadding="3" cellspacing="0" summary="Constant Field Values table, listing constant fields, and values">
-<caption><span>org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html" title="class in org.apache.hadoop.hbase.wal">BoundedRecoveredHFilesOutputSink</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th scope="col">Constant Field</th>
-<th class="colLast" scope="col">Value</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><a name="org.apache.hadoop.hbase.wal.BoundedRecoveredHFilesOutputSink.DEFAULT_WAL_SPLIT_TO_HFILE">
-<!--   -->
-</a><code>public&nbsp;static&nbsp;final&nbsp;boolean</code></td>
-<td><code><a href="org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#DEFAULT_WAL_SPLIT_TO_HFILE">DEFAULT_WAL_SPLIT_TO_HFILE</a></code></td>
-<td class="colLast"><code>false</code></td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><a name="org.apache.hadoop.hbase.wal.BoundedRecoveredHFilesOutputSink.WAL_SPLIT_TO_HFILE">
-<!--   -->
-</a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
-<td><code><a href="org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#WAL_SPLIT_TO_HFILE">WAL_SPLIT_TO_HFILE</a></code></td>
-<td class="colLast"><code>"hbase.wal.split.to.hfile"</code></td>
-</tr>
-</tbody>
-</table>
-</li>
-<li class="blockList">
-<table class="constantsSummary" border="0" cellpadding="3" cellspacing="0" summary="Constant Field Values table, listing constant fields, and values">
 <caption><span>org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/NettyAsyncFSWALConfigHelper.html" title="class in org.apache.hadoop.hbase.wal">NettyAsyncFSWALConfigHelper</a></span><span class="tabEnd">&nbsp;</span></caption>
 <tr>
 <th class="colFirst" scope="col">Modifier and Type</th>
@@ -32386,33 +32360,47 @@
 </tr>
 <tbody>
 <tr class="altColor">
+<td class="colFirst"><a name="org.apache.hadoop.hbase.wal.WALSplitter.DEFAULT_WAL_SPLIT_TO_HFILE">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;boolean</code></td>
+<td><code><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#DEFAULT_WAL_SPLIT_TO_HFILE">DEFAULT_WAL_SPLIT_TO_HFILE</a></code></td>
+<td class="colLast"><code>false</code></td>
+</tr>
+<tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.wal.WALSplitter.SPLIT_SKIP_ERRORS_DEFAULT">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;boolean</code></td>
 <td><code><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#SPLIT_SKIP_ERRORS_DEFAULT">SPLIT_SKIP_ERRORS_DEFAULT</a></code></td>
 <td class="colLast"><code>false</code></td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.wal.WALSplitter.SPLIT_WAL_BUFFER_SIZE">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#SPLIT_WAL_BUFFER_SIZE">SPLIT_WAL_BUFFER_SIZE</a></code></td>
 <td class="colLast"><code>"hbase.regionserver.hlog.splitlog.buffersize"</code></td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.wal.WALSplitter.SPLIT_WAL_WRITER_THREADS">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#SPLIT_WAL_WRITER_THREADS">SPLIT_WAL_WRITER_THREADS</a></code></td>
 <td class="colLast"><code>"hbase.regionserver.hlog.splitlog.writer.threads"</code></td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.wal.WALSplitter.SPLIT_WRITER_CREATION_BOUNDED">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#SPLIT_WRITER_CREATION_BOUNDED">SPLIT_WRITER_CREATION_BOUNDED</a></code></td>
 <td class="colLast"><code>"hbase.split.writer.creation.bounded"</code></td>
 </tr>
+<tr class="rowColor">
+<td class="colFirst"><a name="org.apache.hadoop.hbase.wal.WALSplitter.WAL_SPLIT_TO_HFILE">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
+<td><code><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#WAL_SPLIT_TO_HFILE">WAL_SPLIT_TO_HFILE</a></code></td>
+<td class="colLast"><code>"hbase.wal.split.to.hfile"</code></td>
+</tr>
 </tbody>
 </table>
 </li>
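[Editor's note, not part of the commit] The hunk above relocates the WAL_SPLIT_TO_HFILE and DEFAULT_WAL_SPLIT_TO_HFILE constants from BoundedRecoveredHFilesOutputSink to WALSplitter in the generated constant-values page. As a minimal sketch of the property those constants document, the snippet below sets "hbase.wal.split.to.hfile" (default false, per the table above) through the ordinary Hadoop Configuration API; the class name and main method are hypothetical scaffolding for illustration only.

    // Illustrative sketch only -- not code from this commit. It toggles the
    // property documented above ("hbase.wal.split.to.hfile", default false)
    // using the standard Hadoop/HBase Configuration API.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class WalSplitToHFileConfigSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // When true, WAL splitting writes HFiles directly instead of the
        // intermediary 'recovered.edits' files (see WALSplitter.WAL_SPLIT_TO_HFILE).
        conf.setBoolean("hbase.wal.split.to.hfile", true);
        System.out.println("hbase.wal.split.to.hfile = "
            + conf.getBoolean("hbase.wal.split.to.hfile", false));
      }
    }
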
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index f28c052..8aa0be0 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -7139,7 +7139,9 @@
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/BoundedRecoveredEditsOutputSink.html#BoundedRecoveredEditsOutputSink-org.apache.hadoop.hbase.wal.WALSplitter-org.apache.hadoop.hbase.wal.WALSplitter.PipelineController-org.apache.hadoop.hbase.wal.EntryBuffers-int-">BoundedRecoveredEditsOutputSink(WALSplitter, WALSplitter.PipelineController, EntryBuffers, int)</a></span> - Constructor for class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/BoundedReco [...]
 <dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html" title="class in org.apache.hadoop.hbase.wal"><span class="typeNameLink">BoundedRecoveredHFilesOutputSink</span></a> - Class in <a href="org/apache/hadoop/hbase/wal/package-summary.html">org.apache.hadoop.hbase.wal</a></dt>
-<dd>&nbsp;</dd>
+<dd>
+<div class="block">A WALSplitter sink that outputs <a href="org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>s.</div>
+</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#BoundedRecoveredHFilesOutputSink-org.apache.hadoop.hbase.wal.WALSplitter-org.apache.hadoop.hbase.wal.WALSplitter.PipelineController-org.apache.hadoop.hbase.wal.EntryBuffers-int-">BoundedRecoveredHFilesOutputSink(WALSplitter, WALSplitter.PipelineController, EntryBuffers, int)</a></span> - Constructor for class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/BoundedR [...]
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidator.html#branch1">branch1</a></span> - Variable in class org.apache.hadoop.hbase.tool.coprocessor.<a href="org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidator.html" title="class in org.apache.hadoop.hbase.tool.coprocessor">CoprocessorValidator</a></dt>
@@ -24730,7 +24732,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/WALFactory.html#DEFAULT_WAL_PROVIDER">DEFAULT_WAL_PROVIDER</a></span> - Static variable in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#DEFAULT_WAL_SPLIT_TO_HFILE">DEFAULT_WAL_SPLIT_TO_HFILE</a></span> - Static variable in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html" title="class in org.apache.hadoop.hbase.wal">BoundedRecoveredHFilesOutputSink</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#DEFAULT_WAL_SPLIT_TO_HFILE">DEFAULT_WAL_SPLIT_TO_HFILE</a></span> - Static variable in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/HConstants.html#DEFAULT_WAL_STORAGE_POLICY">DEFAULT_WAL_STORAGE_POLICY</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/HConstants.html" title="class in org.apache.hadoop.hbase">HConstants</a></dt>
 <dd>
@@ -113178,7 +113180,9 @@ service.</div>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/RegionSplitter.UniformSplit.html#split-byte:A-byte:A-int-boolean-">split(byte[], byte[], int, boolean)</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/RegionSplitter.UniformSplit.html" title="class in org.apache.hadoop.hbase.util">RegionSplitter.UniformSplit</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#split-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.wal.WALFactory-">split(Path, Path, Path, FileSystem, Configuration, WALFactory)</a></span> - Static method in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.ha [...]
-<dd>&nbsp;</dd>
+<dd>
+<div class="block">Split a folder of WAL files.</div>
+</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/RegionSplitter.NumberStringSplit.html#split2-java.math.BigInteger-java.math.BigInteger-">split2(BigInteger, BigInteger)</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/RegionSplitter.NumberStringSplit.html" title="class in org.apache.hadoop.hbase.util">RegionSplitter.NumberStringSplit</a></dt>
 <dd>
 <div class="block">Divide 2 numbers in half (for split algorithm)</div>
@@ -113264,7 +113268,9 @@ service.</div>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#SPLIT_WAL_WRITER_THREADS">SPLIT_WAL_WRITER_THREADS</a></span> - Static variable in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#SPLIT_WRITER_CREATION_BOUNDED">SPLIT_WRITER_CREATION_BOUNDED</a></span> - Static variable in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter</a></dt>
-<dd>&nbsp;</dd>
+<dd>
+<div class="block">True if we are to run with bounded amount of writers rather than let the count blossom.</div>
+</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/HbckRegionInfo.MetaEntry.html#splitA">splitA</a></span> - Variable in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/HbckRegionInfo.MetaEntry.html" title="class in org.apache.hadoop.hbase.util">HbckRegionInfo.MetaEntry</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/HConstants.html#SPLITA_QUALIFIER">SPLITA_QUALIFIER</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/HConstants.html" title="class in org.apache.hadoop.hbase">HConstants</a></dt>
@@ -113337,11 +113343,11 @@ service.</div>
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#splitLogFile-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.FileStatus-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.util.CancelableProgressable-org.apache.hadoop.hbase.regionserver.LastSequenceId-org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination-org.apache.hadoop.hbase.wal.WALFactory-org.apache.hadoop.hbase.regionserver.RegionServerServices- [...]
 <dd>
-<div class="block">Splits a WAL file into region's recovered-edits directory.</div>
+<div class="block">Splits a WAL file.</div>
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#splitLogFile-org.apache.hadoop.fs.FileStatus-org.apache.hadoop.hbase.util.CancelableProgressable-">splitLogFile(FileStatus, CancelableProgressable)</a></span> - Method in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter</a></dt>
 <dd>
-<div class="block">log splitting implementation, splits one log file.</div>
+<div class="block">WAL splitting implementation, splits one log file.</div>
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/MasterWalManager.html#splitLogLock">splitLogLock</a></span> - Variable in class org.apache.hadoop.hbase.master.<a href="org/apache/hadoop/hbase/master/MasterWalManager.html" title="class in org.apache.hadoop.hbase.master">MasterWalManager</a></dt>
 <dd>&nbsp;</dd>
@@ -113436,7 +113442,9 @@ service.</div>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/coordination/ZkCoordinatedStateManager.html#splitLogWorkerCoordination">splitLogWorkerCoordination</a></span> - Variable in class org.apache.hadoop.hbase.coordination.<a href="org/apache/hadoop/hbase/coordination/ZkCoordinatedStateManager.html" title="class in org.apache.hadoop.hbase.coordination">ZkCoordinatedStateManager</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#splitLogWorkerCoordination">splitLogWorkerCoordination</a></span> - Variable in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter</a></dt>
-<dd>&nbsp;</dd>
+<dd>
+<div class="block">Coordinator for split log.</div>
+</dd>
 <dt><a href="org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.SplitTaskDetails.html" title="interface in org.apache.hadoop.hbase.coordination"><span class="typeNameLink">SplitLogWorkerCoordination.SplitTaskDetails</span></a> - Interface in <a href="org/apache/hadoop/hbase/coordination/package-summary.html">org.apache.hadoop.hbase.coordination</a></dt>
 <dd>
 <div class="block">Interface for log-split tasks Used to carry implementation details in encapsulated way through
@@ -113568,7 +113576,9 @@ service.</div>
  where n == number of added ranges.</div>
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/OutputSink.html#splits">splits</a></span> - Variable in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/OutputSink.html" title="class in org.apache.hadoop.hbase.wal">OutputSink</a></dt>
-<dd>&nbsp;</dd>
+<dd>
+<div class="block">List of all the files produced by this sink</div>
+</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/RegionSplitter.html#splitScan-java.util.LinkedList-org.apache.hadoop.hbase.client.Connection-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.util.RegionSplitter.SplitAlgorithm-">splitScan(LinkedList&lt;Pair&lt;byte[], byte[]&gt;&gt;, Connection, TableName, RegionSplitter.SplitAlgorithm)</a></span> - Static method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/RegionSplitter.html" [...]
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/MetricsMasterFilesystemSourceImpl.html#splitSizeHisto">splitSizeHisto</a></span> - Variable in class org.apache.hadoop.hbase.master.<a href="org/apache/hadoop/hbase/master/MetricsMasterFilesystemSourceImpl.html" title="class in org.apache.hadoop.hbase.master">MetricsMasterFilesystemSourceImpl</a></dt>
@@ -128208,8 +128218,10 @@ the order they are declared.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/AbstractWALRoller.html#WAL_ROLL_PERIOD_KEY">WAL_ROLL_PERIOD_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/AbstractWALRoller.html" title="class in org.apache.hadoop.hbase.wal">AbstractWALRoller</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#WAL_SPLIT_TO_HFILE">WAL_SPLIT_TO_HFILE</a></span> - Static variable in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html" title="class in org.apache.hadoop.hbase.wal">BoundedRecoveredHFilesOutputSink</a></dt>
-<dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#WAL_SPLIT_TO_HFILE">WAL_SPLIT_TO_HFILE</a></span> - Static variable in class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter</a></dt>
+<dd>
+<div class="block">Split WAL directly to hfiles instead of into intermediary 'recovered.edits' files.</div>
+</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/HConstants.html#WAL_STORAGE_POLICY">WAL_STORAGE_POLICY</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/HConstants.html" title="class in org.apache.hadoop.hbase">HConstants</a></dt>
 <dd>
 <div class="block">Configuration name of WAL storage policy
@@ -128704,9 +128716,7 @@ the order they are declared.</div>
 <dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALSplitter</span></a> - Class in <a href="org/apache/hadoop/hbase/wal/package-summary.html">org.apache.hadoop.hbase.wal</a></dt>
 <dd>
-<div class="block">This class is responsible for splitting up a bunch of regionserver commit log
- files that are no longer being written to, into new files, one per region, for
- recovering data on startup.</div>
+<div class="block">Split RegionServer WAL files.</div>
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/WALSplitter.html#WALSplitter-org.apache.hadoop.hbase.wal.WALFactory-org.apache.hadoop.conf.Configuration-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.hbase.regionserver.LastSequenceId-org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination-org.apache.hadoop.hbase.regionserver.RegionServerServices-">WALSplitter(WALFactory, C [...]
 <dd>&nbsp;</dd>
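[Editor's note, not part of the commit] The index entries above also reference WALSplitter's splitter tuning keys, whose string values appear in the constants table earlier in this diff: "hbase.regionserver.hlog.splitlog.buffersize", "hbase.regionserver.hlog.splitlog.writer.threads", and "hbase.split.writer.creation.bounded" (described as running "with bounded amount of writers rather than let the count blossom"). A hedged sketch of reading them follows; the fallback values passed to the getters are placeholders for illustration, not the project's actual defaults.

    // Illustrative sketch only -- not code from this commit. Reads the splitter
    // tuning keys named in the constants table earlier in this diff. Fallback
    // values are placeholders, not the project's actual defaults.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class WalSplitterTuningSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        long bufferSize = conf.getLong("hbase.regionserver.hlog.splitlog.buffersize", 0L);
        int writerThreads = conf.getInt("hbase.regionserver.hlog.splitlog.writer.threads", 0);
        // Per the index description above: bounded writer creation caps the number
        // of writers rather than letting the count blossom.
        boolean bounded = conf.getBoolean("hbase.split.writer.creation.bounded", false);
        System.out.println(bufferSize + " / " + writerThreads + " / " + bounded);
      }
    }
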
diff --git a/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogWorkerCoordination.html b/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogWorkerCoordination.html
index 89b5a6c..053704d 100644
--- a/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogWorkerCoordination.html
+++ b/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogWorkerCoordination.html
@@ -239,7 +239,9 @@
 <tbody>
 <tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../../org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html" title="interface in org.apache.hadoop.hbase.coordination">SplitLogWorkerCoordination</a></code></td>
-<td class="colLast"><span class="typeNameLabel">WALSplitter.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#splitLogWorkerCoordination">splitLogWorkerCoordination</a></span></code>&nbsp;</td>
+<td class="colLast"><span class="typeNameLabel">WALSplitter.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#splitLogWorkerCoordination">splitLogWorkerCoordination</a></span></code>
+<div class="block">Coordinator for split log.</div>
+</td>
 </tr>
 </tbody>
 </table>
@@ -261,7 +263,7 @@
             <a href="../../../../../../org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html" title="interface in org.apache.hadoop.hbase.coordination">SplitLogWorkerCoordination</a>&nbsp;splitLogWorkerCoordination,
             <a href="../../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory,
             <a href="../../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a>&nbsp;rsServices)</code>
-<div class="block">Splits a WAL file into region's recovered-edits directory.</div>
+<div class="block">Splits a WAL file.</div>
 </td>
 </tr>
 </tbody>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/LastSequenceId.html b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/LastSequenceId.html
index 53e2321..f8b1c98 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/LastSequenceId.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/LastSequenceId.html
@@ -210,7 +210,7 @@
             <a href="../../../../../../org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html" title="interface in org.apache.hadoop.hbase.coordination">SplitLogWorkerCoordination</a>&nbsp;splitLogWorkerCoordination,
             <a href="../../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory,
             <a href="../../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a>&nbsp;rsServices)</code>
-<div class="block">Splits a WAL file into region's recovered-edits directory.</div>
+<div class="block">Splits a WAL file.</div>
 </td>
 </tr>
 </tbody>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerServices.html b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerServices.html
index f7fc002..3bfc9c5 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/class-use/RegionServerServices.html
@@ -1030,7 +1030,7 @@
             <a href="../../../../../../org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html" title="interface in org.apache.hadoop.hbase.coordination">SplitLogWorkerCoordination</a>&nbsp;splitLogWorkerCoordination,
             <a href="../../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory,
             <a href="../../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a>&nbsp;rsServices)</code>
-<div class="block">Splits a WAL file into region's recovered-edits directory.</div>
+<div class="block">Splits a WAL file.</div>
 </td>
 </tr>
 </tbody>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html b/devapidocs/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html
index fd2bb00..1e7e143 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>class <a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html#line.205">HBaseSaslRpcClient.WrappedInputStream</a>
+<pre>class <a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html#line.215">HBaseSaslRpcClient.WrappedInputStream</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/FilterInputStream.html?is-external=true" title="class or interface in java.io">FilterInputStream</a></pre>
 </li>
 </ul>
@@ -243,7 +243,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/FilterInputSt
 <ul class="blockListLast">
 <li class="blockList">
 <h4>unwrappedRpcBuffer</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html#line.206">unwrappedRpcBuffer</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html#line.216">unwrappedRpcBuffer</a></pre>
 </li>
 </ul>
 </li>
@@ -260,7 +260,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/FilterInputSt
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WrappedInputStream</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html#line.207">WrappedInputStream</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a>&nbsp;in)
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html#line.217">WrappedInputStream</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a>&nbsp;in)
                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -282,7 +282,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/FilterInputSt
 <ul class="blockList">
 <li class="blockList">
 <h4>read</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html#line.212">read</a>()
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html#line.222">read</a>()
          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
@@ -298,7 +298,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/FilterInputSt
 <ul class="blockList">
 <li class="blockList">
 <h4>read</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html#line.219">read</a>(byte[]&nbsp;b)
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html#line.229">read</a>(byte[]&nbsp;b)
          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
@@ -314,7 +314,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/FilterInputSt
 <ul class="blockList">
 <li class="blockList">
 <h4>read</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html#line.224">read</a>(byte[]&nbsp;buf,
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html#line.234">read</a>(byte[]&nbsp;buf,
                 int&nbsp;off,
                 int&nbsp;len)
          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -332,7 +332,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/FilterInputSt
 <ul class="blockListLast">
 <li class="blockList">
 <h4>readNextRpcPacket</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html#line.236">readNextRpcPacket</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html#line.246">readNextRpcPacket</a>()
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedOutputStream.html b/devapidocs/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedOutputStream.html
index ba8bdcb..9fda538 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedOutputStream.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedOutputStream.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>class <a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html#line.268">HBaseSaslRpcClient.WrappedOutputStream</a>
+<pre>class <a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html#line.278">HBaseSaslRpcClient.WrappedOutputStream</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/FilterOutputStream.html?is-external=true" title="class or interface in java.io">FilterOutputStream</a></pre>
 </li>
 </ul>
@@ -220,7 +220,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/FilterOutputS
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WrappedOutputStream</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedOutputStream.html#line.269">WrappedOutputStream</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true" title="class or interface in java.io">OutputStream</a>&nbsp;out)
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedOutputStream.html#line.279">WrappedOutputStream</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true" title="class or interface in java.io">OutputStream</a>&nbsp;out)
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -242,7 +242,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/FilterOutputS
 <ul class="blockListLast">
 <li class="blockList">
 <h4>write</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedOutputStream.html#line.273">write</a>(byte[]&nbsp;buf,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedOutputStream.html#line.283">write</a>(byte[]&nbsp;buf,
                   int&nbsp;off,
                   int&nbsp;len)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html b/devapidocs/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html
index 1d888af..bea41dc 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html
@@ -476,7 +476,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/security/AbstractHBaseSa
 <ul class="blockList">
 <li class="blockList">
 <h4>getSaslQOP</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html#line.178">getSaslQOP</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html#line.188">getSaslQOP</a>()</pre>
 </li>
 </ul>
 <a name="initCryptoCipher-org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta-org.apache.hadoop.conf.Configuration-">
@@ -485,7 +485,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/security/AbstractHBaseSa
 <ul class="blockList">
 <li class="blockList">
 <h4>initCryptoCipher</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html#line.182">initCryptoCipher</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta&nbsp;cryptoCipherMeta,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html#line.192">initCryptoCipher</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta&nbsp;cryptoCipherMeta,
                              org.apache.hadoop.conf.Configuration&nbsp;conf)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -500,7 +500,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/security/AbstractHBaseSa
 <ul class="blockList">
 <li class="blockList">
 <h4>getInputStream</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html#line.194">getInputStream</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html#line.204">getInputStream</a>()
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get a SASL wrapped InputStream. Can be called only after saslConnect() has been called.</div>
 <dl>
@@ -517,7 +517,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/security/AbstractHBaseSa
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getOutputStream</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true" title="class or interface in java.io">OutputStream</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html#line.257">getOutputStream</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true" title="class or interface in java.io">OutputStream</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html#line.267">getOutputStream</a>()
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Get a SASL wrapped OutputStream. Can be called only after saslConnect() has been called.</div>
 <dl>
diff --git a/devapidocs/org/apache/hadoop/hbase/util/class-use/CancelableProgressable.html b/devapidocs/org/apache/hadoop/hbase/util/class-use/CancelableProgressable.html
index 84293f2..84e0a07 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/class-use/CancelableProgressable.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/CancelableProgressable.html
@@ -444,7 +444,7 @@
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><span class="typeNameLabel">WALSplitter.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#splitLogFile-org.apache.hadoop.fs.FileStatus-org.apache.hadoop.hbase.util.CancelableProgressable-">splitLogFile</a></span>(org.apache.hadoop.fs.FileStatus&nbsp;logfile,
             <a href="../../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html" title="interface in org.apache.hadoop.hbase.util">CancelableProgressable</a>&nbsp;reporter)</code>
-<div class="block">log splitting implementation, splits one log file.</div>
+<div class="block">WAL splitting implementation, splits one log file.</div>
 </td>
 </tr>
 <tr class="rowColor">
@@ -458,7 +458,7 @@
             <a href="../../../../../../org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html" title="interface in org.apache.hadoop.hbase.coordination">SplitLogWorkerCoordination</a>&nbsp;splitLogWorkerCoordination,
             <a href="../../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory,
             <a href="../../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a>&nbsp;rsServices)</code>
-<div class="block">Splits a WAL file into region's recovered-edits directory.</div>
+<div class="block">Splits a WAL file.</div>
 </td>
 </tr>
 </tbody>
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html b/devapidocs/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html
index 749315d..4d4f7c2 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html
@@ -115,8 +115,15 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.55">BoundedRecoveredHFilesOutputSink</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.59">BoundedRecoveredHFilesOutputSink</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" title="class in org.apache.hadoop.hbase.wal">OutputSink</a></pre>
+<div class="block">A WALSplitter sink that outputs <a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>s.
+ Runs with a bounded number of HFile writers at any one time rather than let the count run up.</div>
+<dl>
+<dt><span class="seeLabel">See Also:</span></dt>
+<dd><a href="../../../../../org/apache/hadoop/hbase/wal/BoundedRecoveredEditsOutputSink.html" title="class in org.apache.hadoop.hbase.wal"><code>for a sink implementation that writes intermediate
+   recovered.edits files.</code></a></dd>
+</dl>
 </li>
 </ul>
 </div>
@@ -151,29 +158,21 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <th class="colLast" scope="col">Field and Description</th>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>static boolean</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#DEFAULT_WAL_SPLIT_TO_HFILE">DEFAULT_WAL_SPLIT_TO_HFILE</a></span></code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
 <td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#openingWritersNum">openingWritersNum</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long< [...]
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#regionEditsWrittenMap">regionEditsWrittenMap</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#tableDescCache">tableDescCache</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
-<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#WAL_SPLIT_TO_HFILE">WAL_SPLIT_TO_HFILE</a></span></code>&nbsp;</td>
-</tr>
 <tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#walSplitter">walSplitter</a></span></code>&nbsp;</td>
@@ -304,33 +303,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.56">LOG</a></pre>
-</li>
-</ul>
-<a name="WAL_SPLIT_TO_HFILE">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>WAL_SPLIT_TO_HFILE</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.58">WAL_SPLIT_TO_HFILE</a></pre>
-<dl>
-<dt><span class="seeLabel">See Also:</span></dt>
-<dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.wal.BoundedRecoveredHFilesOutputSink.WAL_SPLIT_TO_HFILE">Constant Field Values</a></dd>
-</dl>
-</li>
-</ul>
-<a name="DEFAULT_WAL_SPLIT_TO_HFILE">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>DEFAULT_WAL_SPLIT_TO_HFILE</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.59">DEFAULT_WAL_SPLIT_TO_HFILE</a></pre>
-<dl>
-<dt><span class="seeLabel">See Also:</span></dt>
-<dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.wal.BoundedRecoveredHFilesOutputSink.DEFAULT_WAL_SPLIT_TO_HFILE">Constant Field Values</a></dd>
-</dl>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.60">LOG</a></pre>
 </li>
 </ul>
 <a name="walSplitter">
@@ -339,7 +312,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>walSplitter</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.61">walSplitter</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.62">walSplitter</a></pre>
 </li>
 </ul>
 <a name="regionEditsWrittenMap">
@@ -348,7 +321,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>regionEditsWrittenMap</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt; <a href=" [...]
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt; <a href=" [...]
 </li>
 </ul>
 <a name="openingWritersNum">
@@ -357,7 +330,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>openingWritersNum</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.67">openingWritersNum</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.68">openingWritersNum</a></pre>
 </li>
 </ul>
 <a name="tableDescCache">
@@ -366,7 +339,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>tableDescCache</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&gt; <a href="../../ [...]
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>,<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&gt; <a href="../../ [...]
 </li>
 </ul>
 </li>
@@ -383,7 +356,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BoundedRecoveredHFilesOutputSink</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.71">BoundedRecoveredHFilesOutputSink</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter</a>&nbsp;walSplitter,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.72">BoundedRecoveredHFilesOutputSink</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter</a>&nbsp;walSplitter,
                                         <a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter.PipelineController</a>&nbsp;controller,
                                         <a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers</a>&nbsp;entryBuffers,
                                         int&nbsp;numWriters)</pre>
@@ -403,7 +376,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>append</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.79">append</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.RegionEntryBuffer.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers.RegionEntryBuffer</a>&nbsp;buffer)
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.80">append</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.RegionEntryBuffer.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers.RegionEntryBuffer</a>&nbsp;buffer)
      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -421,7 +394,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.126">close</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.131">close</a>()
                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -437,7 +410,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>writeRemainingEntryBuffers</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.141">writeRemainingEntryBuffers</a>()
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.146">writeRemainingEntryBuffers</a>()
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Write out the remaining RegionEntryBuffers and close the writers.</div>
 <dl>
@@ -454,7 +427,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>getOutputCounts</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/had [...]
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/had [...]
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html#getOutputCounts--">getOutputCounts</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" title="class in org.apache.hadoop.hbase.wal">OutputSink</a></code></dd>
@@ -469,7 +442,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumberOfRecoveredRegions</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.175">getNumberOfRecoveredRegions</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.180">getNumberOfRecoveredRegions</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html#getNumberOfRecoveredRegions--">getNumberOfRecoveredRegions</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" title="class in org.apache.hadoop.hbase.wal">OutputSink</a></code></dd>
@@ -484,7 +457,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumOpenWriters</h4>
-<pre>int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.180">getNumOpenWriters</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.185">getNumOpenWriters</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html#getNumOpenWriters--">getNumOpenWriters</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" title="class in org.apache.hadoop.hbase.wal">OutputSink</a></code></dd>
@@ -499,7 +472,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>keepRegionEvent</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.185">keepRegionEvent</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Entry.html" title="class in org.apache.hadoop.hbase.wal">WAL.Entry</a>&nbsp;entry)</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.190">keepRegionEvent</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Entry.html" title="class in org.apache.hadoop.hbase.wal">WAL.Entry</a>&nbsp;entry)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html#keepRegionEvent-org.apache.hadoop.hbase.wal.WAL.Entry-">OutputSink</a></code></span></div>
 <div class="block">Some WALEdit's contain only KV's for account on what happened to a region. Not all sinks will
  want to get all of those edits.</div>
@@ -517,7 +490,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>createRecoveredHFileWriter</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.189">createRecoveredHFileWriter</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.194">createRecoveredHFileWriter</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                                                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;regionName,
                                                    long&nbsp;seqId,
                                                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;familyName,
@@ -535,7 +508,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>createFileContext</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.208">createFileContext</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>&nbsp;cfd,
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFileContext.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileContext</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.213">createFileContext</a>(<a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>&nbsp;cfd,
                                        boolean&nbsp;isMetaTable)
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -550,7 +523,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getTableDescriptor</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.219">getTableDescriptor</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)</pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html#line.224">getTableDescriptor</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)</pre>
 </li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html b/devapidocs/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html
index 0af7a51..7b3beda 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.154">OutputSink.WriterThread</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.157">OutputSink.WriterThread</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true" title="class or interface in java.lang">Thread</a></pre>
 </li>
 </ul>
@@ -266,7 +266,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldStop</h4>
-<pre>private volatile&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.155">shouldStop</a></pre>
+<pre>private volatile&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.158">shouldStop</a></pre>
 </li>
 </ul>
 <a name="controller">
@@ -275,7 +275,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockList">
 <li class="blockList">
 <h4>controller</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter.PipelineController</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.156">controller</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter.PipelineController</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.159">controller</a></pre>
 </li>
 </ul>
 <a name="entryBuffers">
@@ -284,7 +284,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockList">
 <li class="blockList">
 <h4>entryBuffers</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.157">entryBuffers</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.160">entryBuffers</a></pre>
 </li>
 </ul>
 <a name="outputSink">
@@ -293,7 +293,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>outputSink</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" title="class in org.apache.hadoop.hbase.wal">OutputSink</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.158">outputSink</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" title="class in org.apache.hadoop.hbase.wal">OutputSink</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.161">outputSink</a></pre>
 </li>
 </ul>
 </li>
@@ -310,7 +310,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WriterThread</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.160">WriterThread</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter.PipelineController</a>&nbsp;controller,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.163">WriterThread</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter.PipelineController</a>&nbsp;controller,
              <a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers</a>&nbsp;entryBuffers,
              <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" title="class in org.apache.hadoop.hbase.wal">OutputSink</a>&nbsp;sink,
              int&nbsp;i)</pre>
@@ -330,7 +330,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockList">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.169">run</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.172">run</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--" title="class or interface in java.lang">run</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true" title="class or interface in java.lang">Runnable</a></code></dd>
@@ -345,7 +345,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockList">
 <li class="blockList">
 <h4>doRun</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.178">doRun</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.181">doRun</a>()
             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -359,7 +359,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockList">
 <li class="blockList">
 <h4>writeBuffer</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.208">writeBuffer</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.RegionEntryBuffer.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers.RegionEntryBuffer</a>&nbsp;buffer)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.211">writeBuffer</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.RegionEntryBuffer.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers.RegionEntryBuffer</a>&nbsp;buffer)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -373,7 +373,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>finish</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.212">finish</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html#line.215">finish</a>()</pre>
 </li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/OutputSink.html b/devapidocs/org/apache/hadoop/hbase/wal/OutputSink.html
index 1beaa26..c042330 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/OutputSink.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/OutputSink.html
@@ -187,7 +187,9 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html#splits">splits</a></span></code>&nbsp;</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html#splits">splits</a></span></code>
+<div class="block">List of all the files produced by this sink</div>
+</td>
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicLong</a></code></td>
@@ -368,7 +370,8 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>splits</h4>
-<pre>protected final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.59">splits</a></pre>
+<pre>protected final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.62">splits</a></pre>
+<div class="block">List of all the files produced by this sink</div>
 </li>
 </ul>
 <a name="closeThreadPool">
@@ -377,7 +380,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>closeThreadPool</h4>
-<pre>protected final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ThreadPoolExecutor.html?is-external=true" title="class or interface in java.util.concurrent">ThreadPoolExecutor</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.64">closeThreadPool</a></pre>
+<pre>protected final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ThreadPoolExecutor.html?is-external=true" title="class or interface in java.util.concurrent">ThreadPoolExecutor</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.67">closeThreadPool</a></pre>
 <div class="block">Used when close this output sink.</div>
 </li>
 </ul>
@@ -387,7 +390,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>closeCompletionService</h4>
-<pre>protected final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionService.html?is-external=true" title="class or interface in java.util.concurrent">CompletionService</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.65">closeCompletionService</a></pre>
+<pre>protected final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionService.html?is-external=true" title="class or interface in java.util.concurrent">CompletionService</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.68">closeCompletionService</a></pre>
 </li>
 </ul>
 </li>
@@ -404,7 +407,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>OutputSink</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.67">OutputSink</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter.PipelineController</a>&nbsp;controller,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.70">OutputSink</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter.PipelineController</a>&nbsp;controller,
                   <a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers</a>&nbsp;entryBuffers,
                   int&nbsp;numWriters)</pre>
 </li>
@@ -423,7 +426,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setReporter</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.77">setReporter</a>(<a href="../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html" title="interface in org.apache.hadoop.hbase.util">CancelableProgressable</a>&nbsp;reporter)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.80">setReporter</a>(<a href="../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html" title="interface in org.apache.hadoop.hbase.util">CancelableProgressable</a>&nbsp;reporter)</pre>
 </li>
 </ul>
 <a name="startWriterThreads--">
@@ -432,7 +435,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>startWriterThreads</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.84">startWriterThreads</a>()
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.87">startWriterThreads</a>()
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Start the threads that will pump data from the entryBuffers to the output files.</div>
 <dl>
@@ -447,7 +450,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>finishWriterThreads</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.97">finishWriterThreads</a>()
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.100">finishWriterThreads</a>()
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Wait for writer threads to dump all info to the sink</div>
 <dl>
@@ -464,7 +467,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getTotalSkippedEdits</h4>
-<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.121">getTotalSkippedEdits</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.124">getTotalSkippedEdits</a>()</pre>
 </li>
 </ul>
 <a name="getNumOpenWriters--">
@@ -473,7 +476,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumOpenWriters</h4>
-<pre>abstract&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.128">getNumOpenWriters</a>()</pre>
+<pre>abstract&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.131">getNumOpenWriters</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the number of currently opened writers</dd>
@@ -486,7 +489,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>append</h4>
-<pre>abstract&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.133">append</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.RegionEntryBuffer.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers.RegionEntryBuffer</a>&nbsp;buffer)
+<pre>abstract&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.136">append</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.RegionEntryBuffer.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers.RegionEntryBuffer</a>&nbsp;buffer)
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -502,7 +505,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.135">close</a>()
+<pre>abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.138">close</a>()
                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -516,7 +519,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getOutputCounts</h4>
-<pre>abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/h [...]
+<pre>abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/h [...]
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>a map from encoded region ID to the number of edits written out for that region.</dd>
@@ -529,7 +532,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumberOfRecoveredRegions</h4>
-<pre>abstract&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.145">getNumberOfRecoveredRegions</a>()</pre>
+<pre>abstract&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.148">getNumberOfRecoveredRegions</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>number of regions we've recovered</dd>
@@ -542,7 +545,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>keepRegionEvent</h4>
-<pre>abstract&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.152">keepRegionEvent</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Entry.html" title="class in org.apache.hadoop.hbase.wal">WAL.Entry</a>&nbsp;entry)</pre>
+<pre>abstract&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/OutputSink.html#line.155">keepRegionEvent</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Entry.html" title="class in org.apache.hadoop.hbase.wal">WAL.Entry</a>&nbsp;entry)</pre>
 <div class="block">Some WALEdit's contain only KV's for account on what happened to a region. Not all sinks will
  want to get all of those edits.</div>
 <dl>
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html b/devapidocs/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html
index 819ee51..799a491 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html
@@ -120,10 +120,17 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.45">RecoveredEditsOutputSink</a>
+class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.47">RecoveredEditsOutputSink</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEditsOutputSink.html" title="class in org.apache.hadoop.hbase.wal">AbstractRecoveredEditsOutputSink</a></pre>
 <div class="block">Class that manages the output streams from the log splitting process.
- Every region only has one recovered edits.</div>
+ Every region only has one recovered edits file PER split WAL (if we split
+ multiple WALs during a log-splitting session, on open, a Region may
+ have multiple recovered.edits files to replay -- one per split WAL).</div>
+<dl>
+<dt><span class="seeLabel">See Also:</span></dt>
+<dd><a href="../../../../../org/apache/hadoop/hbase/wal/BoundedRecoveredEditsOutputSink.html" title="class in org.apache.hadoop.hbase.wal"><code>which is like this class but imposes upper bound on
+   the number of writers active at one time (makes for better throughput).</code></a></dd>
+</dl>
 </li>
 </ul>
 </div>
@@ -290,7 +297,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEdi
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.46">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.48">LOG</a></pre>
 </li>
 </ul>
 <a name="writers">
@@ -299,7 +306,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEdi
 <ul class="blockListLast">
 <li class="blockList">
 <h4>writers</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEditsOutputSink.RecoveredEditsWriter.html" title="class in org.apache.hadoop.hbase [...]
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEditsOutputSink.RecoveredEditsWriter.html" title="class in org.apache.hadoop.hbase [...]
 </li>
 </ul>
 </li>
@@ -316,7 +323,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEdi
 <ul class="blockListLast">
 <li class="blockList">
 <h4>RecoveredEditsOutputSink</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.49">RecoveredEditsOutputSink</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter</a>&nbsp;walSplitter,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.51">RecoveredEditsOutputSink</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter</a>&nbsp;walSplitter,
                                 <a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter.PipelineController</a>&nbsp;controller,
                                 <a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers</a>&nbsp;entryBuffers,
                                 int&nbsp;numWriters)</pre>
@@ -336,7 +343,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEdi
 <ul class="blockList">
 <li class="blockList">
 <h4>append</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.55">append</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.RegionEntryBuffer.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers.RegionEntryBuffer</a>&nbsp;buffer)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.57">append</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.RegionEntryBuffer.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers.RegionEntryBuffer</a>&nbsp;buffer)
             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -354,7 +361,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEdi
 <ul class="blockList">
 <li class="blockList">
 <h4>getRecoveredEditsWriter</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEditsOutputSink.RecoveredEditsWriter.html" title="class in org.apache.hadoop.hbase.wal">AbstractRecoveredEditsOutputSink.RecoveredEditsWriter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.74">getRecoveredEditsWriter</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;ta [...]
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEditsOutputSink.RecoveredEditsWriter.html" title="class in org.apache.hadoop.hbase.wal">AbstractRecoveredEditsOutputSink.RecoveredEditsWriter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.76">getRecoveredEditsWriter</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;ta [...]
                                                                                       byte[]&nbsp;region,
                                                                                       long&nbsp;seqId)
                                                                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -374,7 +381,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEdi
 <ul class="blockList">
 <li class="blockList">
 <h4>close</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.89">close</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.92">close</a>()
                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -390,7 +397,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEdi
 <ul class="blockList">
 <li class="blockList">
 <h4>closeWriters</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.104">closeWriters</a>()
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.107">closeWriters</a>()
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Close all of the output streams.</div>
 <dl>
@@ -407,7 +414,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEdi
 <ul class="blockList">
 <li class="blockList">
 <h4>getOutputCounts</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/had [...]
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/had [...]
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html#getOutputCounts--">getOutputCounts</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" title="class in org.apache.hadoop.hbase.wal">OutputSink</a></code></dd>
@@ -422,7 +429,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEdi
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumberOfRecoveredRegions</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.147">getNumberOfRecoveredRegions</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.151">getNumberOfRecoveredRegions</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html#getNumberOfRecoveredRegions--">getNumberOfRecoveredRegions</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" title="class in org.apache.hadoop.hbase.wal">OutputSink</a></code></dd>
@@ -437,7 +444,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/wal/AbstractRecoveredEdi
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getNumOpenWriters</h4>
-<pre>int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.152">getNumOpenWriters</a>()</pre>
+<pre>int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html#line.156">getNumOpenWriters</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html#getNumOpenWriters--">getNumOpenWriters</a></code>&nbsp;in class&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" title="class in org.apache.hadoop.hbase.wal">OutputSink</a></code></dd>
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html b/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
index 10d5428..fc99e3a 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
@@ -121,7 +121,7 @@
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.528">WALSplitter.CorruptedLogFileException</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.544">WALSplitter.CorruptedLogFileException</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 </li>
 </ul>
@@ -211,7 +211,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>serialVersionUID</h4>
-<pre>private static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html#line.529">serialVersionUID</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html#line.545">serialVersionUID</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.wal.WALSplitter.CorruptedLogFileException.serialVersionUID">Constant Field Values</a></dd>
@@ -232,7 +232,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.h
 <ul class="blockList">
 <li class="blockList">
 <h4>CorruptedLogFileException</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html#line.531">CorruptedLogFileException</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;s)</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html#line.547">CorruptedLogFileException</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;s)</pre>
 </li>
 </ul>
 <a name="CorruptedLogFileException-java.lang.String-java.lang.Throwable-">
@@ -241,7 +241,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CorruptedLogFileException</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html#line.541">CorruptedLogFileException</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html#line.557">CorruptedLogFileException</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;message,
                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&nbsp;cause)</pre>
 <div class="block">CorruptedLogFileException with cause</div>
 <dl>
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html b/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html
index f71c79f..354df31 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.499">WALSplitter.PipelineController</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.515">WALSplitter.PipelineController</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Contains some methods to control WAL-entries producer / consumer interactions</div>
 </li>
@@ -212,7 +212,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>thrown</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicReference</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html#line.502">thrown</a></pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicReference</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html#line.518">thrown</a></pre>
 </li>
 </ul>
 <a name="dataAvailable">
@@ -221,7 +221,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>dataAvailable</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html#line.506">dataAvailable</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html#line.522">dataAvailable</a></pre>
 </li>
 </ul>
 </li>
@@ -238,7 +238,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>PipelineController</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html#line.499">PipelineController</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html#line.515">PipelineController</a>()</pre>
 </li>
 </ul>
 </li>
@@ -255,7 +255,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>writerThreadError</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html#line.508">writerThreadError</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&nbsp;t)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html#line.524">writerThreadError</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&nbsp;t)</pre>
 </li>
 </ul>
 <a name="checkForErrors--">
@@ -264,7 +264,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>checkForErrors</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html#line.515">checkForErrors</a>()
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html#line.531">checkForErrors</a>()
              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Check for errors in the writer threads. If any is found, rethrow it.</div>
 <dl>
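The PipelineController javadoc above describes a small producer/consumer error latch: writer threads record the first Throwable via writerThreadError(), waiters use the dataAvailable monitor, and checkForErrors() rethrows anything recorded. The following is an illustrative re-creation of that pattern only; the class name PipelineErrorLatch is hypothetical and this is not the HBase class itself:

    import java.io.IOException;
    import java.util.concurrent.atomic.AtomicReference;

    // Sketch of the error-propagation pattern described above: writer threads
    // record the first failure they hit; the coordinating thread rethrows it.
    class PipelineErrorLatch {
      private final AtomicReference<Throwable> thrown = new AtomicReference<>();
      final Object dataAvailable = new Object(); // monitor producers/consumers notify on

      void writerThreadError(Throwable t) {
        thrown.compareAndSet(null, t); // keep only the first failure
      }

      void checkForErrors() throws IOException {
        Throwable t = thrown.get();
        if (t == null) {
          return;
        }
        if (t instanceof IOException) {
          throw (IOException) t;
        }
        throw new IOException(t);
      }
    }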
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.html b/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.html
index cd4c2a7..43a9e10 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/WALSplitter.html
@@ -110,11 +110,14 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.80">WALSplitter</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.78">WALSplitter</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
-<div class="block">This class is responsible for splitting up a bunch of regionserver commit log
- files that are no longer being written to, into new files, one per region, for
- recovering data on startup. Delete the old log files when finished.</div>
+<div class="block">Split RegionServer WAL files. Splits the WAL into new files,
+ one per region, to be picked up on Region reopen. Deletes the split WAL when finished.
+ See <a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#split-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.wal.WALFactory-"><code>split(Path, Path, Path, FileSystem, Configuration, WALFactory)</code></a> or
+ <a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#splitLogFile-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.FileStatus-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.util.CancelableProgressable-org.apache.hadoop.hbase.regionserver.LastSequenceId-org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination-org.apache.hadoop.hbase.wal.WALFactory-org.apache.hadoop.hbase.regionserver.RegionServerServices-"><code>splitLogF [...]
+   LastSequenceId, SplitLogWorkerCoordination, WALFactory, RegionServerServices)</code></a> for
+   entry-point.</div>
 </li>
 </ul>
 </div>
@@ -163,79 +166,93 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#conf">conf</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
+<td class="colFirst"><code>static boolean</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#DEFAULT_WAL_SPLIT_TO_HFILE">DEFAULT_WAL_SPLIT_TO_HFILE</a></span></code>&nbsp;</td>
+</tr>
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#entryBuffers">entryBuffers</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private org.apache.hadoop.fs.FileStatus</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#fileBeingSplit">fileBeingSplit</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#lastFlushedSequenceIds">lastFlushedSequenceIds</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" title="class in org.apache.hadoop.hbase.wal">OutputSink</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#outputSink">outputSink</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle [...]
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#regionMaxSeqIdInStores">regionMaxSeqIdInStores</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#rootDir">rootDir</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>(package private) org.apache.hadoop.fs.FileSystem</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#rootFS">rootFS</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#rsServices">rsServices</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>protected <a href="../../../../../org/apache/hadoop/hbase/regionserver/LastSequenceId.html" title="interface in org.apache.hadoop.hbase.regionserver">LastSequenceId</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#sequenceIdChecker">sequenceIdChecker</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#SPLIT_SKIP_ERRORS_DEFAULT">SPLIT_SKIP_ERRORS_DEFAULT</a></span></code>
 <div class="block">By default we retry errors in splitting, rather than skipping.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#SPLIT_WAL_BUFFER_SIZE">SPLIT_WAL_BUFFER_SIZE</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#SPLIT_WAL_WRITER_THREADS">SPLIT_WAL_WRITER_THREADS</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#SPLIT_WRITER_CREATION_BOUNDED">SPLIT_WRITER_CREATION_BOUNDED</a></span></code>&nbsp;</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#SPLIT_WRITER_CREATION_BOUNDED">SPLIT_WRITER_CREATION_BOUNDED</a></span></code>
+<div class="block">True if we are to run with bounded amount of writers rather than let the count blossom.</div>
+</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html" title="interface in org.apache.hadoop.hbase.coordination">SplitLogWorkerCoordination</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#splitLogWorkerCoordination">splitLogWorkerCoordination</a></span></code>&nbsp;</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#splitLogWorkerCoordination">splitLogWorkerCoordination</a></span></code>
+<div class="block">Coordinator for split log.</div>
+</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#status">status</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>(package private) <a href="../../../../../org/apache/hadoop/hbase/TableDescriptors.html" title="interface in org.apache.hadoop.hbase">TableDescriptors</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#tableDescriptors">tableDescriptors</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#tmpDirName">tmpDirName</a></span></code>&nbsp;</td>
 </tr>
+<tr class="altColor">
+<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#WAL_SPLIT_TO_HFILE">WAL_SPLIT_TO_HFILE</a></span></code>
+<div class="block">Split WAL directly to hfiles instead of into intermediary 'recovered.edits' files.</div>
+</td>
+</tr>
 <tr class="rowColor">
 <td class="colFirst"><code>protected org.apache.hadoop.fs.Path</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#walDir">walDir</a></span></code>&nbsp;</td>
@@ -349,13 +366,15 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
      org.apache.hadoop.fs.Path&nbsp;oldLogDir,
      org.apache.hadoop.fs.FileSystem&nbsp;walFS,
      org.apache.hadoop.conf.Configuration&nbsp;conf,
-     <a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory)</code>&nbsp;</td>
+     <a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory)</code>
+<div class="block">Split a folder of WAL files.</div>
+</td>
 </tr>
 <tr id="i11" class="rowColor">
 <td class="colFirst"><code>(package private) boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html#splitLogFile-org.apache.hadoop.fs.FileStatus-org.apache.hadoop.hbase.util.CancelableProgressable-">splitLogFile</a></span>(org.apache.hadoop.fs.FileStatus&nbsp;logfile,
             <a href="../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html" title="interface in org.apache.hadoop.hbase.util">CancelableProgressable</a>&nbsp;reporter)</code>
-<div class="block">log splitting implementation, splits one log file.</div>
+<div class="block">WAL splitting implementation, splits one log file.</div>
 </td>
 </tr>
 <tr id="i12" class="altColor">
@@ -369,7 +388,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             <a href="../../../../../org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html" title="interface in org.apache.hadoop.hbase.coordination">SplitLogWorkerCoordination</a>&nbsp;splitLogWorkerCoordination,
             <a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory,
             <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a>&nbsp;rsServices)</code>
-<div class="block">Splits a WAL file into region's recovered-edits directory.</div>
+<div class="block">Splits a WAL file.</div>
 </td>
 </tr>
 </table>
@@ -400,7 +419,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.81">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.79">LOG</a></pre>
 </li>
 </ul>
 <a name="SPLIT_SKIP_ERRORS_DEFAULT">
@@ -409,7 +428,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>SPLIT_SKIP_ERRORS_DEFAULT</h4>
-<pre>public static final&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.84">SPLIT_SKIP_ERRORS_DEFAULT</a></pre>
+<pre>public static final&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.82">SPLIT_SKIP_ERRORS_DEFAULT</a></pre>
 <div class="block">By default we retry errors in splitting, rather than skipping.</div>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
@@ -423,7 +442,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>walDir</h4>
-<pre>protected final&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.87">walDir</a></pre>
+<pre>protected final&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.85">walDir</a></pre>
 </li>
 </ul>
 <a name="walFS">
@@ -432,7 +451,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>walFS</h4>
-<pre>protected final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.88">walFS</a></pre>
+<pre>protected final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.86">walFS</a></pre>
 </li>
 </ul>
 <a name="conf">
@@ -441,7 +460,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>protected final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.89">conf</a></pre>
+<pre>protected final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.87">conf</a></pre>
 </li>
 </ul>
 <a name="rootDir">
@@ -450,7 +469,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>rootDir</h4>
-<pre>final&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.90">rootDir</a></pre>
+<pre>final&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.88">rootDir</a></pre>
 </li>
 </ul>
 <a name="rootFS">
@@ -459,7 +478,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>rootFS</h4>
-<pre>final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.91">rootFS</a></pre>
+<pre>final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.89">rootFS</a></pre>
 </li>
 </ul>
 <a name="rsServices">
@@ -468,7 +487,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>rsServices</h4>
-<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.92">rsServices</a></pre>
+<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.90">rsServices</a></pre>
 </li>
 </ul>
 <a name="tableDescriptors">
@@ -477,7 +496,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tableDescriptors</h4>
-<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableDescriptors.html" title="interface in org.apache.hadoop.hbase">TableDescriptors</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.93">tableDescriptors</a></pre>
+<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableDescriptors.html" title="interface in org.apache.hadoop.hbase">TableDescriptors</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.91">tableDescriptors</a></pre>
 </li>
 </ul>
 <a name="outputSink">
@@ -486,7 +505,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>outputSink</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" title="class in org.apache.hadoop.hbase.wal">OutputSink</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.97">outputSink</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/wal/OutputSink.html" title="class in org.apache.hadoop.hbase.wal">OutputSink</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.95">outputSink</a></pre>
 </li>
 </ul>
 <a name="entryBuffers">
@@ -495,7 +514,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>entryBuffers</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.98">entryBuffers</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/EntryBuffers.html" title="class in org.apache.hadoop.hbase.wal">EntryBuffers</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.96">entryBuffers</a></pre>
 </li>
 </ul>
 <a name="splitLogWorkerCoordination">
@@ -504,7 +523,9 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>splitLogWorkerCoordination</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html" title="interface in org.apache.hadoop.hbase.coordination">SplitLogWorkerCoordination</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.100">splitLogWorkerCoordination</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html" title="interface in org.apache.hadoop.hbase.coordination">SplitLogWorkerCoordination</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.102">splitLogWorkerCoordination</a></pre>
+<div class="block">Coordinator for split log. Used by the zk-based log splitter.
+ Not used by the procedure v2-based log splitter.</div>
 </li>
 </ul>
 <a name="walFactory">
@@ -513,7 +534,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>walFactory</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.101">walFactory</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.104">walFactory</a></pre>
 </li>
 </ul>
 <a name="status">
@@ -522,7 +543,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>status</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.103">status</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.106">status</a></pre>
 </li>
 </ul>
 <a name="sequenceIdChecker">
@@ -531,7 +552,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>sequenceIdChecker</h4>
-<pre>protected final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/LastSequenceId.html" title="interface in org.apache.hadoop.hbase.regionserver">LastSequenceId</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.106">sequenceIdChecker</a></pre>
+<pre>protected final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/LastSequenceId.html" title="interface in org.apache.hadoop.hbase.regionserver">LastSequenceId</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.109">sequenceIdChecker</a></pre>
 </li>
 </ul>
 <a name="lastFlushedSequenceIds">
@@ -540,7 +561,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>lastFlushedSequenceIds</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt; <a href="../../../../../src-html/org/apache/hadoo [...]
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt; <a href="../../../../../src-html/org/apache/hadoo [...]
 </li>
 </ul>
 <a name="regionMaxSeqIdInStores">
@@ -549,7 +570,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>regionMaxSeqIdInStores</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/doc [...]
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/doc [...]
 </li>
 </ul>
 <a name="fileBeingSplit">
@@ -558,7 +579,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>fileBeingSplit</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileStatus <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.115">fileBeingSplit</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.FileStatus <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.118">fileBeingSplit</a></pre>
 </li>
 </ul>
 <a name="tmpDirName">
@@ -567,7 +588,34 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tmpDirName</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.117">tmpDirName</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.120">tmpDirName</a></pre>
+</li>
+</ul>
+<a name="WAL_SPLIT_TO_HFILE">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>WAL_SPLIT_TO_HFILE</h4>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.125">WAL_SPLIT_TO_HFILE</a></pre>
+<div class="block">Split WAL directly to hfiles instead of into intermediary 'recovered.edits' files.</div>
+<dl>
+<dt><span class="seeLabel">See Also:</span></dt>
+<dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.wal.WALSplitter.WAL_SPLIT_TO_HFILE">Constant Field Values</a></dd>
+</dl>
+</li>
+</ul>
+<a name="DEFAULT_WAL_SPLIT_TO_HFILE">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>DEFAULT_WAL_SPLIT_TO_HFILE</h4>
+<pre>public static final&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.126">DEFAULT_WAL_SPLIT_TO_HFILE</a></pre>
+<dl>
+<dt><span class="seeLabel">See Also:</span></dt>
+<dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.wal.WALSplitter.DEFAULT_WAL_SPLIT_TO_HFILE">Constant Field Values</a></dd>
+</dl>
 </li>
 </ul>
 <a name="SPLIT_WRITER_CREATION_BOUNDED">
@@ -576,7 +624,11 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>SPLIT_WRITER_CREATION_BOUNDED</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.119">SPLIT_WRITER_CREATION_BOUNDED</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.134">SPLIT_WRITER_CREATION_BOUNDED</a></pre>
+<div class="block">True if we are to run with bounded amount of writers rather than let the count blossom.
+ Default is 'false'. Does not apply if you have set 'hbase.wal.split.to.hfile' as that
+ is always bounded. Only applies when you are doing recovery to 'recovered.edits'
+ files (the old default). Bounded writing tends to have higher throughput.</div>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.wal.WALSplitter.SPLIT_WRITER_CREATION_BOUNDED">Constant Field Values</a></dd>
@@ -589,7 +641,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>SPLIT_WAL_BUFFER_SIZE</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.120">SPLIT_WAL_BUFFER_SIZE</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.136">SPLIT_WAL_BUFFER_SIZE</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.wal.WALSplitter.SPLIT_WAL_BUFFER_SIZE">Constant Field Values</a></dd>
@@ -602,7 +654,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SPLIT_WAL_WRITER_THREADS</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.121">SPLIT_WAL_WRITER_THREADS</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.137">SPLIT_WAL_WRITER_THREADS</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.wal.WALSplitter.SPLIT_WAL_WRITER_THREADS">Constant Field Values</a></dd>
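The constants documented in the hunks above (WAL_SPLIT_TO_HFILE, SPLIT_WRITER_CREATION_BOUNDED, SPLIT_WAL_BUFFER_SIZE, SPLIT_WAL_WRITER_THREADS) are ordinary Configuration keys; their literal key strings are listed in constant-values.html. A minimal sketch of setting them, assuming the two split-behaviour switches are boolean-valued (as their defaults above indicate) and that the buffer/thread keys take integers (an assumption); WALSplitter is audience-private, so this is illustration only and the WalSplitConfSketch class is hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.wal.WALSplitter;

    public final class WalSplitConfSketch {
      static Configuration newSplitConf() {
        Configuration conf = HBaseConfiguration.create();
        // Split WAL output directly to HFiles instead of 'recovered.edits' files.
        conf.setBoolean(WALSplitter.WAL_SPLIT_TO_HFILE, true);
        // Only applies to the 'recovered.edits' path; bounds the number of open writers.
        conf.setBoolean(WALSplitter.SPLIT_WRITER_CREATION_BOUNDED, true);
        // Assumed integer-valued tuning knobs (buffer bytes, writer thread count).
        conf.setInt(WALSplitter.SPLIT_WAL_BUFFER_SIZE, 128 * 1024 * 1024);
        conf.setInt(WALSplitter.SPLIT_WAL_WRITER_THREADS, 3);
        return conf;
      }
    }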
@@ -623,7 +675,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>WALSplitter</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.125">WALSplitter</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.141">WALSplitter</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory,
             org.apache.hadoop.conf.Configuration&nbsp;conf,
             org.apache.hadoop.fs.Path&nbsp;walDir,
             org.apache.hadoop.fs.FileSystem&nbsp;walFS,
@@ -648,7 +700,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getWalFactory</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.171">getWalFactory</a>()</pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.187">getWalFactory</a>()</pre>
 </li>
 </ul>
 <a name="getFileBeingSplit--">
@@ -657,7 +709,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getFileBeingSplit</h4>
-<pre>org.apache.hadoop.fs.FileStatus&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.175">getFileBeingSplit</a>()</pre>
+<pre>org.apache.hadoop.fs.FileStatus&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.191">getFileBeingSplit</a>()</pre>
 </li>
 </ul>
 <a name="getTmpDirName--">
@@ -666,7 +718,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getTmpDirName</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.179">getTmpDirName</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.195">getTmpDirName</a>()</pre>
 </li>
 </ul>
 <a name="getRegionMaxSeqIdInStores--">
@@ -675,7 +727,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionMaxSeqIdInStores</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/lang [...]
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/lang [...]
 </li>
 </ul>
 <a name="splitLogFile-org.apache.hadoop.fs.Path-org.apache.hadoop.fs.FileStatus-org.apache.hadoop.fs.FileSystem-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.util.CancelableProgressable-org.apache.hadoop.hbase.regionserver.LastSequenceId-org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination-org.apache.hadoop.hbase.wal.WALFactory-org.apache.hadoop.hbase.regionserver.RegionServerServices-">
@@ -684,7 +736,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>splitLogFile</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.195">splitLogFile</a>(org.apache.hadoop.fs.Path&nbsp;walDir,
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.207">splitLogFile</a>(org.apache.hadoop.fs.Path&nbsp;walDir,
                                    org.apache.hadoop.fs.FileStatus&nbsp;logfile,
                                    org.apache.hadoop.fs.FileSystem&nbsp;walFS,
                                    org.apache.hadoop.conf.Configuration&nbsp;conf,
@@ -694,11 +746,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
                                    <a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory,
                                    <a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a>&nbsp;rsServices)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
-<div class="block">Splits a WAL file into region's recovered-edits directory.
- This is the main entry point for distributed log splitting from SplitLogWorker.
- <p>
- If the log file has N regions then N recovered.edits files will be produced.
- <p></div>
+<div class="block">Splits a WAL file.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>false if it is interrupted by the progress-able.</dd>
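Below is a hedged sketch of calling the per-file entry point whose signature appears above. The SplitOneWal wrapper is hypothetical; passing null for the reporter, sequence-id checker, coordination and region-server hooks follows the field javadoc above (the coordinator is only used by the zk-based splitter), but treating them as nullable is an assumption of this sketch, not a documented contract:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.wal.WALFactory;
    import org.apache.hadoop.hbase.wal.WALSplitter;

    final class SplitOneWal {
      // Splits a single WAL file; returns false if interrupted by the progressable.
      static boolean splitOne(Path walDir, Path walFile, Configuration conf, WALFactory factory)
          throws IOException {
        FileSystem walFS = walDir.getFileSystem(conf);
        FileStatus status = walFS.getFileStatus(walFile);
        return WALSplitter.splitLogFile(walDir, status, walFS, conf,
          null /* reporter */, null /* idChecker */, null /* coordination */,
          factory, null /* rsServices */);
      }
    }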
@@ -713,14 +761,20 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>split</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.211">split</a>(org.apache.hadoop.fs.Path&nbsp;walDir,
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.226">split</a>(org.apache.hadoop.fs.Path&nbsp;walDir,
                                                     org.apache.hadoop.fs.Path&nbsp;logDir,
                                                     org.apache.hadoop.fs.Path&nbsp;oldLogDir,
                                                     org.apache.hadoop.fs.FileSystem&nbsp;walFS,
                                                     org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                     <a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory)
                                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<div class="block">Split a folder of WAL files. Delete the directory when done.
+ Used by tools and unit tests. It should be package private.
+ It is public only because TestWALObserver is in a different package,
+ which uses this method to do log splitting.</div>
 <dl>
+<dt><span class="returnLabel">Returns:</span></dt>
+<dd>List of output files created by the split.</dd>
 <dt><span class="throwsLabel">Throws:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
 </dl>
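A short usage sketch of the folder-level split(...) documented above, in the tool/unit-test style its javadoc mentions. The SplitWalFolder class is hypothetical; the WAL directories and the WALFactory are assumed to be prepared by the caller:

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.wal.WALFactory;
    import org.apache.hadoop.hbase.wal.WALSplitter;

    final class SplitWalFolder {
      static List<Path> splitFolder(Path walDir, Path logDir, Path oldLogDir,
          Configuration conf, WALFactory factory) throws IOException {
        FileSystem walFS = walDir.getFileSystem(conf);
        // Per the javadoc above: splits every WAL under logDir into per-region
        // output files, deletes the directory when done, and returns the paths
        // of the files it created.
        return WALSplitter.split(walDir, logDir, oldLogDir, walFS, conf, factory);
      }
    }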
@@ -732,10 +786,10 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>splitLogFile</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.241">splitLogFile</a>(org.apache.hadoop.fs.FileStatus&nbsp;logfile,
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.256">splitLogFile</a>(org.apache.hadoop.fs.FileStatus&nbsp;logfile,
                      <a href="../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html" title="interface in org.apache.hadoop.hbase.util">CancelableProgressable</a>&nbsp;reporter)
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
-<div class="block">log splitting implementation, splits one log file.</div>
+<div class="block">WAL splitting implementation, splits one log file.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>logfile</code> - should be an actual log file.</dd>
@@ -750,7 +804,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isRegionDirPresentUnderRoot</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.392">isRegionDirPresentUnderRoot</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.408">isRegionDirPresentUnderRoot</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                                             <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;regionName)
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -765,7 +819,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getReader</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.401">getReader</a>(org.apache.hadoop.fs.FileStatus&nbsp;file,
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.417">getReader</a>(org.apache.hadoop.fs.FileStatus&nbsp;file,
                              boolean&nbsp;skipErrors,
                              <a href="../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html" title="interface in org.apache.hadoop.hbase.util">CancelableProgressable</a>&nbsp;reporter)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
@@ -784,7 +838,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getNextLogLine</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Entry.html" title="class in org.apache.hadoop.hbase.wal">WAL.Entry</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.445">getNextLogLine</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;in,
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Entry.html" title="class in org.apache.hadoop.hbase.wal">WAL.Entry</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.461">getNextLogLine</a>(<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;in,
                                  org.apache.hadoop.fs.Path&nbsp;path,
                                  boolean&nbsp;skipErrors)
                           throws <a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter.CorruptedLogFileException</a>,
@@ -802,7 +856,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createWriter</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.Writer.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider.Writer</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.473">createWriter</a>(org.apache.hadoop.fs.Path&nbsp;logfile)
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.Writer.html" title="interface in org.apache.hadoop.hbase.wal">WALProvider.Writer</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.489">createWriter</a>(org.apache.hadoop.fs.Path&nbsp;logfile)
                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Create a new <a href="../../../../../org/apache/hadoop/hbase/wal/WALProvider.Writer.html" title="interface in org.apache.hadoop.hbase.wal"><code>WALProvider.Writer</code></a> for writing log splits.</div>
 <dl>
@@ -819,7 +873,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getReader</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.481">getReader</a>(org.apache.hadoop.fs.Path&nbsp;curLogFile,
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal">WAL.Reader</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.497">getReader</a>(org.apache.hadoop.fs.Path&nbsp;curLogFile,
                                <a href="../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html" title="interface in org.apache.hadoop.hbase.util">CancelableProgressable</a>&nbsp;reporter)
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Create a new <a href="../../../../../org/apache/hadoop/hbase/wal/WAL.Reader.html" title="interface in org.apache.hadoop.hbase.wal"><code>WAL.Reader</code></a> for reading logs to split.</div>
@@ -837,7 +891,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getNumOpenWriters</h4>
-<pre>private&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.488">getNumOpenWriters</a>()</pre>
+<pre>private&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/WALSplitter.html#line.504">getNumOpenWriters</a>()</pre>
 <div class="block">Get current open writers</div>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/class-use/OutputSink.html b/devapidocs/org/apache/hadoop/hbase/wal/class-use/OutputSink.html
index 8c51791..99e5e22 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/class-use/OutputSink.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/class-use/OutputSink.html
@@ -114,7 +114,9 @@
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>class&nbsp;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html" title="class in org.apache.hadoop.hbase.wal">BoundedRecoveredHFilesOutputSink</a></span></code>&nbsp;</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html" title="class in org.apache.hadoop.hbase.wal">BoundedRecoveredHFilesOutputSink</a></span></code>
+<div class="block">A WALSplitter sink that outputs <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>s.</div>
+</td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code>(package private) class&nbsp;</code></td>
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/class-use/WALFactory.html b/devapidocs/org/apache/hadoop/hbase/wal/class-use/WALFactory.html
index b7c1446..533aeea 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/class-use/WALFactory.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/class-use/WALFactory.html
@@ -341,7 +341,9 @@
      org.apache.hadoop.fs.Path&nbsp;oldLogDir,
      org.apache.hadoop.fs.FileSystem&nbsp;walFS,
      org.apache.hadoop.conf.Configuration&nbsp;conf,
-     <a href="../../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory)</code>&nbsp;</td>
+     <a href="../../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory)</code>
+<div class="block">Split a folder of WAL files.</div>
+</td>
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>static boolean</code></td>
@@ -354,7 +356,7 @@
             <a href="../../../../../../org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.html" title="interface in org.apache.hadoop.hbase.coordination">SplitLogWorkerCoordination</a>&nbsp;splitLogWorkerCoordination,
             <a href="../../../../../../org/apache/hadoop/hbase/wal/WALFactory.html" title="class in org.apache.hadoop.hbase.wal">WALFactory</a>&nbsp;factory,
             <a href="../../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a>&nbsp;rsServices)</code>
-<div class="block">Splits a WAL file into region's recovered-edits directory.</div>
+<div class="block">Splits a WAL file.</div>
 </td>
 </tr>
 </tbody>
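
The split(...) row above is the folder-level entry point ("Split a folder of WAL files"). A hedged sketch of invoking it is below; the leading Path arguments are truncated in this hunk, so the walRootDir/serverLogDir names and the exact leading-parameter order are assumptions, shown only as pass-throughs.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.wal.WALFactory;
    import org.apache.hadoop.hbase.wal.WALSplitter;

    public final class WalFolderSplitSketch {
      private WalFolderSplitSketch() {
      }

      // walRootDir and serverLogDir stand in for the leading Path arguments that are
      // cut off in the hunk above; their names here are assumptions.
      static void splitDeadServerLogs(Path walRootDir, Path serverLogDir, Path oldLogDir,
          FileSystem walFS, Configuration conf, WALFactory factory) throws IOException {
        // Split every WAL file under serverLogDir, archiving processed files to oldLogDir.
        WALSplitter.split(walRootDir, serverLogDir, oldLogDir, walFS, conf, factory);
      }
    }
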
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/package-summary.html b/devapidocs/org/apache/hadoop/hbase/wal/package-summary.html
index acffa65..505f110 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/package-summary.html
@@ -191,7 +191,9 @@
 </tr>
 <tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html" title="class in org.apache.hadoop.hbase.wal">BoundedRecoveredHFilesOutputSink</a></td>
-<td class="colLast">&nbsp;</td>
+<td class="colLast">
+<div class="block">A WALSplitter sink that outputs <a href="../../../../../org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile"><code>HFile</code></a>s.</div>
+</td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/wal/DisabledWALProvider.html" title="class in org.apache.hadoop.hbase.wal">DisabledWALProvider</a></td>
@@ -306,9 +308,7 @@
 <tr class="altColor">
 <td class="colFirst"><a href="../../../../../org/apache/hadoop/hbase/wal/WALSplitter.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter</a></td>
 <td class="colLast">
-<div class="block">This class is responsible for splitting up a bunch of regionserver commit log
- files that are no longer being written to, into new files, one per region, for
- recovering data on startup.</div>
+<div class="block">Split RegionServer WAL files.</div>
 </td>
 </tr>
 <tr class="rowColor">
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/package-use.html b/devapidocs/org/apache/hadoop/hbase/wal/package-use.html
index 8ad2004..e818708 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/package-use.html
@@ -568,9 +568,7 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
 </tr>
 <tr class="altColor">
 <td class="colOne"><a href="../../../../../org/apache/hadoop/hbase/wal/class-use/WALSplitter.html#org.apache.hadoop.hbase.wal">WALSplitter</a>
-<div class="block">This class is responsible for splitting up a bunch of regionserver commit log
- files that are no longer being written to, into new files, one per region, for
- recovering data on startup.</div>
+<div class="block">Split RegionServer WAL files.</div>
 </td>
 </tr>
 <tr class="rowColor">
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html b/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html
index c049b46..5fd1d77 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedInputStream.html
@@ -156,142 +156,152 @@
 <span class="sourceLineNo">148</span>          inStream.readFully(saslToken);<a name="line.148"></a>
 <span class="sourceLineNo">149</span>        }<a name="line.149"></a>
 <span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      if (LOG.isDebugEnabled()) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>        LOG.debug("SASL client context established. Negotiated QoP: "<a name="line.152"></a>
-<span class="sourceLineNo">153</span>            + saslClient.getNegotiatedProperty(Sasl.QOP));<a name="line.153"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>      try {<a name="line.152"></a>
+<span class="sourceLineNo">153</span>        readStatus(inStream);<a name="line.153"></a>
 <span class="sourceLineNo">154</span>      }<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      // initial the inputStream, outputStream for both Sasl encryption<a name="line.155"></a>
-<span class="sourceLineNo">156</span>      // and Crypto AES encryption if necessary<a name="line.156"></a>
-<span class="sourceLineNo">157</span>      // if Crypto AES encryption enabled, the saslInputStream/saslOutputStream is<a name="line.157"></a>
-<span class="sourceLineNo">158</span>      // only responsible for connection header negotiation,<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      // cryptoInputStream/cryptoOutputStream is responsible for rpc encryption with Crypto AES<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      saslInputStream = new SaslInputStream(inS, saslClient);<a name="line.160"></a>
-<span class="sourceLineNo">161</span>      saslOutputStream = new SaslOutputStream(outS, saslClient);<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      if (initStreamForCrypto) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>        cryptoInputStream = new WrappedInputStream(inS);<a name="line.163"></a>
-<span class="sourceLineNo">164</span>        cryptoOutputStream = new WrappedOutputStream(outS);<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      }<a name="line.165"></a>
-<span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>      return true;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    } catch (IOException e) {<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      try {<a name="line.169"></a>
-<span class="sourceLineNo">170</span>        saslClient.dispose();<a name="line.170"></a>
-<span class="sourceLineNo">171</span>      } catch (SaslException ignored) {<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        // ignore further exceptions during cleanup<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      }<a name="line.173"></a>
-<span class="sourceLineNo">174</span>      throw e;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>    }<a name="line.175"></a>
-<span class="sourceLineNo">176</span>  }<a name="line.176"></a>
-<span class="sourceLineNo">177</span><a name="line.177"></a>
-<span class="sourceLineNo">178</span>  public String getSaslQOP() {<a name="line.178"></a>
-<span class="sourceLineNo">179</span>    return (String) saslClient.getNegotiatedProperty(Sasl.QOP);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  }<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>  public void initCryptoCipher(RPCProtos.CryptoCipherMeta cryptoCipherMeta,<a name="line.182"></a>
-<span class="sourceLineNo">183</span>      Configuration conf) throws IOException {<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    // create SaslAES for client<a name="line.184"></a>
-<span class="sourceLineNo">185</span>    cryptoAES = EncryptionUtil.createCryptoAES(cryptoCipherMeta, conf);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    cryptoAesEnable = true;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  }<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>  /**<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * Get a SASL wrapped InputStream. Can be called only after saslConnect() has been called.<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * @return a SASL wrapped InputStream<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * @throws IOException<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   */<a name="line.193"></a>
-<span class="sourceLineNo">194</span>  public InputStream getInputStream() throws IOException {<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    if (!saslClient.isComplete()) {<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      throw new IOException("Sasl authentication exchange hasn't completed yet");<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    }<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    // If Crypto AES is enabled, return cryptoInputStream which unwrap the data with Crypto AES.<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    if (cryptoAesEnable &amp;&amp; cryptoInputStream != null) {<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      return cryptoInputStream;<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    }<a name="line.201"></a>
-<span class="sourceLineNo">202</span>    return saslInputStream;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>  }<a name="line.203"></a>
-<span class="sourceLineNo">204</span><a name="line.204"></a>
-<span class="sourceLineNo">205</span>  class WrappedInputStream extends FilterInputStream {<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    private ByteBuffer unwrappedRpcBuffer = ByteBuffer.allocate(0);<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    public WrappedInputStream(InputStream in) throws IOException {<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      super(in);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    }<a name="line.209"></a>
-<span class="sourceLineNo">210</span><a name="line.210"></a>
-<span class="sourceLineNo">211</span>    @Override<a name="line.211"></a>
-<span class="sourceLineNo">212</span>    public int read() throws IOException {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      byte[] b = new byte[1];<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      int n = read(b, 0, 1);<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      return (n != -1) ? b[0] : -1;<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    }<a name="line.216"></a>
-<span class="sourceLineNo">217</span><a name="line.217"></a>
-<span class="sourceLineNo">218</span>    @Override<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    public int read(byte b[]) throws IOException {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      return read(b, 0, b.length);<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    }<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    @Override<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    public synchronized int read(byte[] buf, int off, int len) throws IOException {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      // fill the buffer with the next RPC message<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      if (unwrappedRpcBuffer.remaining() == 0) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        readNextRpcPacket();<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
-<span class="sourceLineNo">229</span>      // satisfy as much of the request as possible<a name="line.229"></a>
-<span class="sourceLineNo">230</span>      int readLen = Math.min(len, unwrappedRpcBuffer.remaining());<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      unwrappedRpcBuffer.get(buf, off, readLen);<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      return readLen;<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    }<a name="line.233"></a>
-<span class="sourceLineNo">234</span><a name="line.234"></a>
-<span class="sourceLineNo">235</span>    // unwrap messages with Crypto AES<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    private void readNextRpcPacket() throws IOException {<a name="line.236"></a>
-<span class="sourceLineNo">237</span>      LOG.debug("reading next wrapped RPC packet");<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      DataInputStream dis = new DataInputStream(in);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      int rpcLen = dis.readInt();<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      byte[] rpcBuf = new byte[rpcLen];<a name="line.240"></a>
-<span class="sourceLineNo">241</span>      dis.readFully(rpcBuf);<a name="line.241"></a>
-<span class="sourceLineNo">242</span><a name="line.242"></a>
-<span class="sourceLineNo">243</span>      // unwrap with Crypto AES<a name="line.243"></a>
-<span class="sourceLineNo">244</span>      rpcBuf = cryptoAES.unwrap(rpcBuf, 0, rpcBuf.length);<a name="line.244"></a>
-<span class="sourceLineNo">245</span>      if (LOG.isDebugEnabled()) {<a name="line.245"></a>
-<span class="sourceLineNo">246</span>        LOG.debug("unwrapping token of length:" + rpcBuf.length);<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      }<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      unwrappedRpcBuffer = ByteBuffer.wrap(rpcBuf);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    }<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  }<a name="line.250"></a>
-<span class="sourceLineNo">251</span><a name="line.251"></a>
-<span class="sourceLineNo">252</span>  /**<a name="line.252"></a>
-<span class="sourceLineNo">253</span>   * Get a SASL wrapped OutputStream. Can be called only after saslConnect() has been called.<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * @return a SASL wrapped OutputStream<a name="line.254"></a>
-<span class="sourceLineNo">255</span>   * @throws IOException<a name="line.255"></a>
-<span class="sourceLineNo">256</span>   */<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  public OutputStream getOutputStream() throws IOException {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    if (!saslClient.isComplete()) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      throw new IOException("Sasl authentication exchange hasn't completed yet");<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    // If Crypto AES is enabled, return cryptoOutputStream which wrap the data with Crypto AES.<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    if (cryptoAesEnable &amp;&amp; cryptoOutputStream != null) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      return cryptoOutputStream;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    return saslOutputStream;<a name="line.265"></a>
-<span class="sourceLineNo">266</span>  }<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  class WrappedOutputStream extends FilterOutputStream {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    public WrappedOutputStream(OutputStream out) throws IOException {<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      super(out);<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    }<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    @Override<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    public void write(byte[] buf, int off, int len) throws IOException {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      if (LOG.isDebugEnabled()) {<a name="line.274"></a>
-<span class="sourceLineNo">275</span>        LOG.debug("wrapping token of length:" + len);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      }<a name="line.276"></a>
+<span class="sourceLineNo">155</span>      catch (IOException e){<a name="line.155"></a>
+<span class="sourceLineNo">156</span>        if(e instanceof RemoteException){<a name="line.156"></a>
+<span class="sourceLineNo">157</span>          LOG.debug("Sasl connection failed: ", e);<a name="line.157"></a>
+<span class="sourceLineNo">158</span>          throw e;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>        }<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      }<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      if (LOG.isDebugEnabled()) {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>        LOG.debug("SASL client context established. Negotiated QoP: "<a name="line.162"></a>
+<span class="sourceLineNo">163</span>            + saslClient.getNegotiatedProperty(Sasl.QOP));<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      }<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      // initial the inputStream, outputStream for both Sasl encryption<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      // and Crypto AES encryption if necessary<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      // if Crypto AES encryption enabled, the saslInputStream/saslOutputStream is<a name="line.167"></a>
+<span class="sourceLineNo">168</span>      // only responsible for connection header negotiation,<a name="line.168"></a>
+<span class="sourceLineNo">169</span>      // cryptoInputStream/cryptoOutputStream is responsible for rpc encryption with Crypto AES<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      saslInputStream = new SaslInputStream(inS, saslClient);<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      saslOutputStream = new SaslOutputStream(outS, saslClient);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>      if (initStreamForCrypto) {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>        cryptoInputStream = new WrappedInputStream(inS);<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        cryptoOutputStream = new WrappedOutputStream(outS);<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      }<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>      return true;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    } catch (IOException e) {<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      try {<a name="line.179"></a>
+<span class="sourceLineNo">180</span>        saslClient.dispose();<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      } catch (SaslException ignored) {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>        // ignore further exceptions during cleanup<a name="line.182"></a>
+<span class="sourceLineNo">183</span>      }<a name="line.183"></a>
+<span class="sourceLineNo">184</span>      throw e;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    }<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  public String getSaslQOP() {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    return (String) saslClient.getNegotiatedProperty(Sasl.QOP);<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  }<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>  public void initCryptoCipher(RPCProtos.CryptoCipherMeta cryptoCipherMeta,<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      Configuration conf) throws IOException {<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    // create SaslAES for client<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    cryptoAES = EncryptionUtil.createCryptoAES(cryptoCipherMeta, conf);<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    cryptoAesEnable = true;<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  }<a name="line.197"></a>
+<span class="sourceLineNo">198</span><a name="line.198"></a>
+<span class="sourceLineNo">199</span>  /**<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * Get a SASL wrapped InputStream. Can be called only after saslConnect() has been called.<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * @return a SASL wrapped InputStream<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @throws IOException<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   */<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  public InputStream getInputStream() throws IOException {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    if (!saslClient.isComplete()) {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      throw new IOException("Sasl authentication exchange hasn't completed yet");<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    }<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    // If Crypto AES is enabled, return cryptoInputStream which unwrap the data with Crypto AES.<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    if (cryptoAesEnable &amp;&amp; cryptoInputStream != null) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      return cryptoInputStream;<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    }<a name="line.211"></a>
+<span class="sourceLineNo">212</span>    return saslInputStream;<a name="line.212"></a>
+<span class="sourceLineNo">213</span>  }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>  class WrappedInputStream extends FilterInputStream {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    private ByteBuffer unwrappedRpcBuffer = ByteBuffer.allocate(0);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    public WrappedInputStream(InputStream in) throws IOException {<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      super(in);<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    }<a name="line.219"></a>
+<span class="sourceLineNo">220</span><a name="line.220"></a>
+<span class="sourceLineNo">221</span>    @Override<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    public int read() throws IOException {<a name="line.222"></a>
+<span class="sourceLineNo">223</span>      byte[] b = new byte[1];<a name="line.223"></a>
+<span class="sourceLineNo">224</span>      int n = read(b, 0, 1);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>      return (n != -1) ? b[0] : -1;<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    }<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>    @Override<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    public int read(byte b[]) throws IOException {<a name="line.229"></a>
+<span class="sourceLineNo">230</span>      return read(b, 0, b.length);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    }<a name="line.231"></a>
+<span class="sourceLineNo">232</span><a name="line.232"></a>
+<span class="sourceLineNo">233</span>    @Override<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    public synchronized int read(byte[] buf, int off, int len) throws IOException {<a name="line.234"></a>
+<span class="sourceLineNo">235</span>      // fill the buffer with the next RPC message<a name="line.235"></a>
+<span class="sourceLineNo">236</span>      if (unwrappedRpcBuffer.remaining() == 0) {<a name="line.236"></a>
+<span class="sourceLineNo">237</span>        readNextRpcPacket();<a name="line.237"></a>
+<span class="sourceLineNo">238</span>      }<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      // satisfy as much of the request as possible<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      int readLen = Math.min(len, unwrappedRpcBuffer.remaining());<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      unwrappedRpcBuffer.get(buf, off, readLen);<a name="line.241"></a>
+<span class="sourceLineNo">242</span>      return readLen;<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    }<a name="line.243"></a>
+<span class="sourceLineNo">244</span><a name="line.244"></a>
+<span class="sourceLineNo">245</span>    // unwrap messages with Crypto AES<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    private void readNextRpcPacket() throws IOException {<a name="line.246"></a>
+<span class="sourceLineNo">247</span>      LOG.debug("reading next wrapped RPC packet");<a name="line.247"></a>
+<span class="sourceLineNo">248</span>      DataInputStream dis = new DataInputStream(in);<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      int rpcLen = dis.readInt();<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      byte[] rpcBuf = new byte[rpcLen];<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      dis.readFully(rpcBuf);<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>      // unwrap with Crypto AES<a name="line.253"></a>
+<span class="sourceLineNo">254</span>      rpcBuf = cryptoAES.unwrap(rpcBuf, 0, rpcBuf.length);<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      if (LOG.isDebugEnabled()) {<a name="line.255"></a>
+<span class="sourceLineNo">256</span>        LOG.debug("unwrapping token of length:" + rpcBuf.length);<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      }<a name="line.257"></a>
+<span class="sourceLineNo">258</span>      unwrappedRpcBuffer = ByteBuffer.wrap(rpcBuf);<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    }<a name="line.259"></a>
+<span class="sourceLineNo">260</span>  }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>  /**<a name="line.262"></a>
+<span class="sourceLineNo">263</span>   * Get a SASL wrapped OutputStream. Can be called only after saslConnect() has been called.<a name="line.263"></a>
+<span class="sourceLineNo">264</span>   * @return a SASL wrapped OutputStream<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * @throws IOException<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   */<a name="line.266"></a>
+<span class="sourceLineNo">267</span>  public OutputStream getOutputStream() throws IOException {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    if (!saslClient.isComplete()) {<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      throw new IOException("Sasl authentication exchange hasn't completed yet");<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    }<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    // If Crypto AES is enabled, return cryptoOutputStream which wrap the data with Crypto AES.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    if (cryptoAesEnable &amp;&amp; cryptoOutputStream != null) {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>      return cryptoOutputStream;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    }<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    return saslOutputStream;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  }<a name="line.276"></a>
 <span class="sourceLineNo">277</span><a name="line.277"></a>
-<span class="sourceLineNo">278</span>      // wrap with Crypto AES<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      byte[] wrapped = cryptoAES.wrap(buf, off, len);<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      DataOutputStream dob = new DataOutputStream(out);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      dob.writeInt(wrapped.length);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      dob.write(wrapped, 0, wrapped.length);<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      dob.flush();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    }<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  }<a name="line.285"></a>
-<span class="sourceLineNo">286</span>}<a name="line.286"></a>
+<span class="sourceLineNo">278</span>  class WrappedOutputStream extends FilterOutputStream {<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    public WrappedOutputStream(OutputStream out) throws IOException {<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      super(out);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>    }<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    @Override<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    public void write(byte[] buf, int off, int len) throws IOException {<a name="line.283"></a>
+<span class="sourceLineNo">284</span>      if (LOG.isDebugEnabled()) {<a name="line.284"></a>
+<span class="sourceLineNo">285</span>        LOG.debug("wrapping token of length:" + len);<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>      // wrap with Crypto AES<a name="line.288"></a>
+<span class="sourceLineNo">289</span>      byte[] wrapped = cryptoAES.wrap(buf, off, len);<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      DataOutputStream dob = new DataOutputStream(out);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      dob.writeInt(wrapped.length);<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      dob.write(wrapped, 0, wrapped.length);<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      dob.flush();<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    }<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  }<a name="line.295"></a>
+<span class="sourceLineNo">296</span>}<a name="line.296"></a>
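
The wrapped streams above use a simple length-prefixed frame: the writer emits a 4-byte length followed by the Crypto-AES-wrapped payload, and the reader reads the length, reads the payload fully, then unwraps it. A self-contained sketch of that framing follows; the Codec interface is a hypothetical stand-in for the cryptoAES.wrap()/unwrap() calls in the source, and the class and method names are mine.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    public final class WrappedFramingSketch {

      /** Hypothetical stand-in for the cryptoAES.wrap()/unwrap() calls shown above. */
      interface Codec {
        byte[] wrap(byte[] buf, int off, int len);
        byte[] unwrap(byte[] buf, int off, int len);
      }

      // Mirrors WrappedOutputStream.write(): wrap, then emit a 4-byte length plus payload.
      static void writePacket(DataOutputStream out, Codec codec, byte[] buf) throws IOException {
        byte[] wrapped = codec.wrap(buf, 0, buf.length);
        out.writeInt(wrapped.length);
        out.write(wrapped, 0, wrapped.length);
        out.flush();
      }

      // Mirrors readNextRpcPacket(): read the length, read the payload fully, then unwrap.
      static byte[] readPacket(DataInputStream in, Codec codec) throws IOException {
        int rpcLen = in.readInt();
        byte[] rpcBuf = new byte[rpcLen];
        in.readFully(rpcBuf);
        return codec.unwrap(rpcBuf, 0, rpcBuf.length);
      }

      public static void main(String[] args) throws IOException {
        // Identity codec, just to exercise the framing round trip.
        Codec identity = new Codec() {
          @Override
          public byte[] wrap(byte[] buf, int off, int len) {
            byte[] copy = new byte[len];
            System.arraycopy(buf, off, copy, 0, len);
            return copy;
          }

          @Override
          public byte[] unwrap(byte[] buf, int off, int len) {
            return wrap(buf, off, len);
          }
        };
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        writePacket(new DataOutputStream(bos), identity, "hello".getBytes(StandardCharsets.UTF_8));
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
        byte[] back = readPacket(in, identity);
        System.out.println(new String(back, StandardCharsets.UTF_8));   // prints "hello"
      }
    }
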
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedOutputStream.html b/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedOutputStream.html
index c049b46..5fd1d77 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedOutputStream.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.WrappedOutputStream.html
@@ -156,142 +156,152 @@
 <span class="sourceLineNo">148</span>          inStream.readFully(saslToken);<a name="line.148"></a>
 <span class="sourceLineNo">149</span>        }<a name="line.149"></a>
 <span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      if (LOG.isDebugEnabled()) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>        LOG.debug("SASL client context established. Negotiated QoP: "<a name="line.152"></a>
-<span class="sourceLineNo">153</span>            + saslClient.getNegotiatedProperty(Sasl.QOP));<a name="line.153"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>      try {<a name="line.152"></a>
+<span class="sourceLineNo">153</span>        readStatus(inStream);<a name="line.153"></a>
 <span class="sourceLineNo">154</span>      }<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      // initial the inputStream, outputStream for both Sasl encryption<a name="line.155"></a>
-<span class="sourceLineNo">156</span>      // and Crypto AES encryption if necessary<a name="line.156"></a>
-<span class="sourceLineNo">157</span>      // if Crypto AES encryption enabled, the saslInputStream/saslOutputStream is<a name="line.157"></a>
-<span class="sourceLineNo">158</span>      // only responsible for connection header negotiation,<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      // cryptoInputStream/cryptoOutputStream is responsible for rpc encryption with Crypto AES<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      saslInputStream = new SaslInputStream(inS, saslClient);<a name="line.160"></a>
-<span class="sourceLineNo">161</span>      saslOutputStream = new SaslOutputStream(outS, saslClient);<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      if (initStreamForCrypto) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>        cryptoInputStream = new WrappedInputStream(inS);<a name="line.163"></a>
-<span class="sourceLineNo">164</span>        cryptoOutputStream = new WrappedOutputStream(outS);<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      }<a name="line.165"></a>
-<span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>      return true;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    } catch (IOException e) {<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      try {<a name="line.169"></a>
-<span class="sourceLineNo">170</span>        saslClient.dispose();<a name="line.170"></a>
-<span class="sourceLineNo">171</span>      } catch (SaslException ignored) {<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        // ignore further exceptions during cleanup<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      }<a name="line.173"></a>
-<span class="sourceLineNo">174</span>      throw e;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>    }<a name="line.175"></a>
-<span class="sourceLineNo">176</span>  }<a name="line.176"></a>
-<span class="sourceLineNo">177</span><a name="line.177"></a>
-<span class="sourceLineNo">178</span>  public String getSaslQOP() {<a name="line.178"></a>
-<span class="sourceLineNo">179</span>    return (String) saslClient.getNegotiatedProperty(Sasl.QOP);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  }<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>  public void initCryptoCipher(RPCProtos.CryptoCipherMeta cryptoCipherMeta,<a name="line.182"></a>
-<span class="sourceLineNo">183</span>      Configuration conf) throws IOException {<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    // create SaslAES for client<a name="line.184"></a>
-<span class="sourceLineNo">185</span>    cryptoAES = EncryptionUtil.createCryptoAES(cryptoCipherMeta, conf);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    cryptoAesEnable = true;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  }<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>  /**<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * Get a SASL wrapped InputStream. Can be called only after saslConnect() has been called.<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * @return a SASL wrapped InputStream<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * @throws IOException<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   */<a name="line.193"></a>
-<span class="sourceLineNo">194</span>  public InputStream getInputStream() throws IOException {<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    if (!saslClient.isComplete()) {<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      throw new IOException("Sasl authentication exchange hasn't completed yet");<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    }<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    // If Crypto AES is enabled, return cryptoInputStream which unwrap the data with Crypto AES.<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    if (cryptoAesEnable &amp;&amp; cryptoInputStream != null) {<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      return cryptoInputStream;<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    }<a name="line.201"></a>
-<span class="sourceLineNo">202</span>    return saslInputStream;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>  }<a name="line.203"></a>
-<span class="sourceLineNo">204</span><a name="line.204"></a>
-<span class="sourceLineNo">205</span>  class WrappedInputStream extends FilterInputStream {<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    private ByteBuffer unwrappedRpcBuffer = ByteBuffer.allocate(0);<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    public WrappedInputStream(InputStream in) throws IOException {<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      super(in);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    }<a name="line.209"></a>
-<span class="sourceLineNo">210</span><a name="line.210"></a>
-<span class="sourceLineNo">211</span>    @Override<a name="line.211"></a>
-<span class="sourceLineNo">212</span>    public int read() throws IOException {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      byte[] b = new byte[1];<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      int n = read(b, 0, 1);<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      return (n != -1) ? b[0] : -1;<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    }<a name="line.216"></a>
-<span class="sourceLineNo">217</span><a name="line.217"></a>
-<span class="sourceLineNo">218</span>    @Override<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    public int read(byte b[]) throws IOException {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      return read(b, 0, b.length);<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    }<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    @Override<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    public synchronized int read(byte[] buf, int off, int len) throws IOException {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      // fill the buffer with the next RPC message<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      if (unwrappedRpcBuffer.remaining() == 0) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        readNextRpcPacket();<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
-<span class="sourceLineNo">229</span>      // satisfy as much of the request as possible<a name="line.229"></a>
-<span class="sourceLineNo">230</span>      int readLen = Math.min(len, unwrappedRpcBuffer.remaining());<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      unwrappedRpcBuffer.get(buf, off, readLen);<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      return readLen;<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    }<a name="line.233"></a>
-<span class="sourceLineNo">234</span><a name="line.234"></a>
-<span class="sourceLineNo">235</span>    // unwrap messages with Crypto AES<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    private void readNextRpcPacket() throws IOException {<a name="line.236"></a>
-<span class="sourceLineNo">237</span>      LOG.debug("reading next wrapped RPC packet");<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      DataInputStream dis = new DataInputStream(in);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      int rpcLen = dis.readInt();<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      byte[] rpcBuf = new byte[rpcLen];<a name="line.240"></a>
-<span class="sourceLineNo">241</span>      dis.readFully(rpcBuf);<a name="line.241"></a>
-<span class="sourceLineNo">242</span><a name="line.242"></a>
-<span class="sourceLineNo">243</span>      // unwrap with Crypto AES<a name="line.243"></a>
-<span class="sourceLineNo">244</span>      rpcBuf = cryptoAES.unwrap(rpcBuf, 0, rpcBuf.length);<a name="line.244"></a>
-<span class="sourceLineNo">245</span>      if (LOG.isDebugEnabled()) {<a name="line.245"></a>
-<span class="sourceLineNo">246</span>        LOG.debug("unwrapping token of length:" + rpcBuf.length);<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      }<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      unwrappedRpcBuffer = ByteBuffer.wrap(rpcBuf);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    }<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  }<a name="line.250"></a>
-<span class="sourceLineNo">251</span><a name="line.251"></a>
-<span class="sourceLineNo">252</span>  /**<a name="line.252"></a>
-<span class="sourceLineNo">253</span>   * Get a SASL wrapped OutputStream. Can be called only after saslConnect() has been called.<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * @return a SASL wrapped OutputStream<a name="line.254"></a>
-<span class="sourceLineNo">255</span>   * @throws IOException<a name="line.255"></a>
-<span class="sourceLineNo">256</span>   */<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  public OutputStream getOutputStream() throws IOException {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    if (!saslClient.isComplete()) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      throw new IOException("Sasl authentication exchange hasn't completed yet");<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    // If Crypto AES is enabled, return cryptoOutputStream which wrap the data with Crypto AES.<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    if (cryptoAesEnable &amp;&amp; cryptoOutputStream != null) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      return cryptoOutputStream;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    return saslOutputStream;<a name="line.265"></a>
-<span class="sourceLineNo">266</span>  }<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  class WrappedOutputStream extends FilterOutputStream {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    public WrappedOutputStream(OutputStream out) throws IOException {<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      super(out);<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    }<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    @Override<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    public void write(byte[] buf, int off, int len) throws IOException {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      if (LOG.isDebugEnabled()) {<a name="line.274"></a>
-<span class="sourceLineNo">275</span>        LOG.debug("wrapping token of length:" + len);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      }<a name="line.276"></a>
+<span class="sourceLineNo">155</span>      catch (IOException e){<a name="line.155"></a>
+<span class="sourceLineNo">156</span>        if(e instanceof RemoteException){<a name="line.156"></a>
+<span class="sourceLineNo">157</span>          LOG.debug("Sasl connection failed: ", e);<a name="line.157"></a>
+<span class="sourceLineNo">158</span>          throw e;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>        }<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      }<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      if (LOG.isDebugEnabled()) {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>        LOG.debug("SASL client context established. Negotiated QoP: "<a name="line.162"></a>
+<span class="sourceLineNo">163</span>            + saslClient.getNegotiatedProperty(Sasl.QOP));<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      }<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      // initial the inputStream, outputStream for both Sasl encryption<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      // and Crypto AES encryption if necessary<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      // if Crypto AES encryption enabled, the saslInputStream/saslOutputStream is<a name="line.167"></a>
+<span class="sourceLineNo">168</span>      // only responsible for connection header negotiation,<a name="line.168"></a>
+<span class="sourceLineNo">169</span>      // cryptoInputStream/cryptoOutputStream is responsible for rpc encryption with Crypto AES<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      saslInputStream = new SaslInputStream(inS, saslClient);<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      saslOutputStream = new SaslOutputStream(outS, saslClient);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>      if (initStreamForCrypto) {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>        cryptoInputStream = new WrappedInputStream(inS);<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        cryptoOutputStream = new WrappedOutputStream(outS);<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      }<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>      return true;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    } catch (IOException e) {<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      try {<a name="line.179"></a>
+<span class="sourceLineNo">180</span>        saslClient.dispose();<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      } catch (SaslException ignored) {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>        // ignore further exceptions during cleanup<a name="line.182"></a>
+<span class="sourceLineNo">183</span>      }<a name="line.183"></a>
+<span class="sourceLineNo">184</span>      throw e;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    }<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  public String getSaslQOP() {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    return (String) saslClient.getNegotiatedProperty(Sasl.QOP);<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  }<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>  public void initCryptoCipher(RPCProtos.CryptoCipherMeta cryptoCipherMeta,<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      Configuration conf) throws IOException {<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    // create SaslAES for client<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    cryptoAES = EncryptionUtil.createCryptoAES(cryptoCipherMeta, conf);<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    cryptoAesEnable = true;<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  }<a name="line.197"></a>
+<span class="sourceLineNo">198</span><a name="line.198"></a>
+<span class="sourceLineNo">199</span>  /**<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * Get a SASL wrapped InputStream. Can be called only after saslConnect() has been called.<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * @return a SASL wrapped InputStream<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @throws IOException<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   */<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  public InputStream getInputStream() throws IOException {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    if (!saslClient.isComplete()) {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      throw new IOException("Sasl authentication exchange hasn't completed yet");<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    }<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    // If Crypto AES is enabled, return cryptoInputStream which unwrap the data with Crypto AES.<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    if (cryptoAesEnable &amp;&amp; cryptoInputStream != null) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      return cryptoInputStream;<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    }<a name="line.211"></a>
+<span class="sourceLineNo">212</span>    return saslInputStream;<a name="line.212"></a>
+<span class="sourceLineNo">213</span>  }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>  class WrappedInputStream extends FilterInputStream {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    private ByteBuffer unwrappedRpcBuffer = ByteBuffer.allocate(0);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    public WrappedInputStream(InputStream in) throws IOException {<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      super(in);<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    }<a name="line.219"></a>
+<span class="sourceLineNo">220</span><a name="line.220"></a>
+<span class="sourceLineNo">221</span>    @Override<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    public int read() throws IOException {<a name="line.222"></a>
+<span class="sourceLineNo">223</span>      byte[] b = new byte[1];<a name="line.223"></a>
+<span class="sourceLineNo">224</span>      int n = read(b, 0, 1);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>      return (n != -1) ? b[0] : -1;<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    }<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>    @Override<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    public int read(byte b[]) throws IOException {<a name="line.229"></a>
+<span class="sourceLineNo">230</span>      return read(b, 0, b.length);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    }<a name="line.231"></a>
+<span class="sourceLineNo">232</span><a name="line.232"></a>
+<span class="sourceLineNo">233</span>    @Override<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    public synchronized int read(byte[] buf, int off, int len) throws IOException {<a name="line.234"></a>
+<span class="sourceLineNo">235</span>      // fill the buffer with the next RPC message<a name="line.235"></a>
+<span class="sourceLineNo">236</span>      if (unwrappedRpcBuffer.remaining() == 0) {<a name="line.236"></a>
+<span class="sourceLineNo">237</span>        readNextRpcPacket();<a name="line.237"></a>
+<span class="sourceLineNo">238</span>      }<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      // satisfy as much of the request as possible<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      int readLen = Math.min(len, unwrappedRpcBuffer.remaining());<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      unwrappedRpcBuffer.get(buf, off, readLen);<a name="line.241"></a>
+<span class="sourceLineNo">242</span>      return readLen;<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    }<a name="line.243"></a>
+<span class="sourceLineNo">244</span><a name="line.244"></a>
+<span class="sourceLineNo">245</span>    // unwrap messages with Crypto AES<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    private void readNextRpcPacket() throws IOException {<a name="line.246"></a>
+<span class="sourceLineNo">247</span>      LOG.debug("reading next wrapped RPC packet");<a name="line.247"></a>
+<span class="sourceLineNo">248</span>      DataInputStream dis = new DataInputStream(in);<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      int rpcLen = dis.readInt();<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      byte[] rpcBuf = new byte[rpcLen];<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      dis.readFully(rpcBuf);<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>      // unwrap with Crypto AES<a name="line.253"></a>
+<span class="sourceLineNo">254</span>      rpcBuf = cryptoAES.unwrap(rpcBuf, 0, rpcBuf.length);<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      if (LOG.isDebugEnabled()) {<a name="line.255"></a>
+<span class="sourceLineNo">256</span>        LOG.debug("unwrapping token of length:" + rpcBuf.length);<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      }<a name="line.257"></a>
+<span class="sourceLineNo">258</span>      unwrappedRpcBuffer = ByteBuffer.wrap(rpcBuf);<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    }<a name="line.259"></a>
+<span class="sourceLineNo">260</span>  }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>  /**<a name="line.262"></a>
+<span class="sourceLineNo">263</span>   * Get a SASL wrapped OutputStream. Can be called only after saslConnect() has been called.<a name="line.263"></a>
+<span class="sourceLineNo">264</span>   * @return a SASL wrapped OutputStream<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * @throws IOException<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   */<a name="line.266"></a>
+<span class="sourceLineNo">267</span>  public OutputStream getOutputStream() throws IOException {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    if (!saslClient.isComplete()) {<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      throw new IOException("Sasl authentication exchange hasn't completed yet");<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    }<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    // If Crypto AES is enabled, return cryptoOutputStream which wrap the data with Crypto AES.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    if (cryptoAesEnable &amp;&amp; cryptoOutputStream != null) {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>      return cryptoOutputStream;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    }<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    return saslOutputStream;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  }<a name="line.276"></a>
 <span class="sourceLineNo">277</span><a name="line.277"></a>
-<span class="sourceLineNo">278</span>      // wrap with Crypto AES<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      byte[] wrapped = cryptoAES.wrap(buf, off, len);<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      DataOutputStream dob = new DataOutputStream(out);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      dob.writeInt(wrapped.length);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      dob.write(wrapped, 0, wrapped.length);<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      dob.flush();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    }<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  }<a name="line.285"></a>
-<span class="sourceLineNo">286</span>}<a name="line.286"></a>
+<span class="sourceLineNo">278</span>  class WrappedOutputStream extends FilterOutputStream {<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    public WrappedOutputStream(OutputStream out) throws IOException {<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      super(out);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>    }<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    @Override<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    public void write(byte[] buf, int off, int len) throws IOException {<a name="line.283"></a>
+<span class="sourceLineNo">284</span>      if (LOG.isDebugEnabled()) {<a name="line.284"></a>
+<span class="sourceLineNo">285</span>        LOG.debug("wrapping token of length:" + len);<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>      // wrap with Crypto AES<a name="line.288"></a>
+<span class="sourceLineNo">289</span>      byte[] wrapped = cryptoAES.wrap(buf, off, len);<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      DataOutputStream dob = new DataOutputStream(out);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      dob.writeInt(wrapped.length);<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      dob.write(wrapped, 0, wrapped.length);<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      dob.flush();<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    }<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  }<a name="line.295"></a>
+<span class="sourceLineNo">296</span>}<a name="line.296"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html b/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html
index c049b46..5fd1d77 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.html
@@ -156,142 +156,152 @@
 <span class="sourceLineNo">148</span>          inStream.readFully(saslToken);<a name="line.148"></a>
 <span class="sourceLineNo">149</span>        }<a name="line.149"></a>
 <span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      if (LOG.isDebugEnabled()) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>        LOG.debug("SASL client context established. Negotiated QoP: "<a name="line.152"></a>
-<span class="sourceLineNo">153</span>            + saslClient.getNegotiatedProperty(Sasl.QOP));<a name="line.153"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>      try {<a name="line.152"></a>
+<span class="sourceLineNo">153</span>        readStatus(inStream);<a name="line.153"></a>
 <span class="sourceLineNo">154</span>      }<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      // initial the inputStream, outputStream for both Sasl encryption<a name="line.155"></a>
-<span class="sourceLineNo">156</span>      // and Crypto AES encryption if necessary<a name="line.156"></a>
-<span class="sourceLineNo">157</span>      // if Crypto AES encryption enabled, the saslInputStream/saslOutputStream is<a name="line.157"></a>
-<span class="sourceLineNo">158</span>      // only responsible for connection header negotiation,<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      // cryptoInputStream/cryptoOutputStream is responsible for rpc encryption with Crypto AES<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      saslInputStream = new SaslInputStream(inS, saslClient);<a name="line.160"></a>
-<span class="sourceLineNo">161</span>      saslOutputStream = new SaslOutputStream(outS, saslClient);<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      if (initStreamForCrypto) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>        cryptoInputStream = new WrappedInputStream(inS);<a name="line.163"></a>
-<span class="sourceLineNo">164</span>        cryptoOutputStream = new WrappedOutputStream(outS);<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      }<a name="line.165"></a>
-<span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>      return true;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    } catch (IOException e) {<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      try {<a name="line.169"></a>
-<span class="sourceLineNo">170</span>        saslClient.dispose();<a name="line.170"></a>
-<span class="sourceLineNo">171</span>      } catch (SaslException ignored) {<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        // ignore further exceptions during cleanup<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      }<a name="line.173"></a>
-<span class="sourceLineNo">174</span>      throw e;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>    }<a name="line.175"></a>
-<span class="sourceLineNo">176</span>  }<a name="line.176"></a>
-<span class="sourceLineNo">177</span><a name="line.177"></a>
-<span class="sourceLineNo">178</span>  public String getSaslQOP() {<a name="line.178"></a>
-<span class="sourceLineNo">179</span>    return (String) saslClient.getNegotiatedProperty(Sasl.QOP);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  }<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>  public void initCryptoCipher(RPCProtos.CryptoCipherMeta cryptoCipherMeta,<a name="line.182"></a>
-<span class="sourceLineNo">183</span>      Configuration conf) throws IOException {<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    // create SaslAES for client<a name="line.184"></a>
-<span class="sourceLineNo">185</span>    cryptoAES = EncryptionUtil.createCryptoAES(cryptoCipherMeta, conf);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    cryptoAesEnable = true;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  }<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>  /**<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * Get a SASL wrapped InputStream. Can be called only after saslConnect() has been called.<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * @return a SASL wrapped InputStream<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * @throws IOException<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   */<a name="line.193"></a>
-<span class="sourceLineNo">194</span>  public InputStream getInputStream() throws IOException {<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    if (!saslClient.isComplete()) {<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      throw new IOException("Sasl authentication exchange hasn't completed yet");<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    }<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    // If Crypto AES is enabled, return cryptoInputStream which unwrap the data with Crypto AES.<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    if (cryptoAesEnable &amp;&amp; cryptoInputStream != null) {<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      return cryptoInputStream;<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    }<a name="line.201"></a>
-<span class="sourceLineNo">202</span>    return saslInputStream;<a name="line.202"></a>
-<span class="sourceLineNo">203</span>  }<a name="line.203"></a>
-<span class="sourceLineNo">204</span><a name="line.204"></a>
-<span class="sourceLineNo">205</span>  class WrappedInputStream extends FilterInputStream {<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    private ByteBuffer unwrappedRpcBuffer = ByteBuffer.allocate(0);<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    public WrappedInputStream(InputStream in) throws IOException {<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      super(in);<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    }<a name="line.209"></a>
-<span class="sourceLineNo">210</span><a name="line.210"></a>
-<span class="sourceLineNo">211</span>    @Override<a name="line.211"></a>
-<span class="sourceLineNo">212</span>    public int read() throws IOException {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      byte[] b = new byte[1];<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      int n = read(b, 0, 1);<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      return (n != -1) ? b[0] : -1;<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    }<a name="line.216"></a>
-<span class="sourceLineNo">217</span><a name="line.217"></a>
-<span class="sourceLineNo">218</span>    @Override<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    public int read(byte b[]) throws IOException {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      return read(b, 0, b.length);<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    }<a name="line.221"></a>
-<span class="sourceLineNo">222</span><a name="line.222"></a>
-<span class="sourceLineNo">223</span>    @Override<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    public synchronized int read(byte[] buf, int off, int len) throws IOException {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      // fill the buffer with the next RPC message<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      if (unwrappedRpcBuffer.remaining() == 0) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        readNextRpcPacket();<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
-<span class="sourceLineNo">229</span>      // satisfy as much of the request as possible<a name="line.229"></a>
-<span class="sourceLineNo">230</span>      int readLen = Math.min(len, unwrappedRpcBuffer.remaining());<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      unwrappedRpcBuffer.get(buf, off, readLen);<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      return readLen;<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    }<a name="line.233"></a>
-<span class="sourceLineNo">234</span><a name="line.234"></a>
-<span class="sourceLineNo">235</span>    // unwrap messages with Crypto AES<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    private void readNextRpcPacket() throws IOException {<a name="line.236"></a>
-<span class="sourceLineNo">237</span>      LOG.debug("reading next wrapped RPC packet");<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      DataInputStream dis = new DataInputStream(in);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      int rpcLen = dis.readInt();<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      byte[] rpcBuf = new byte[rpcLen];<a name="line.240"></a>
-<span class="sourceLineNo">241</span>      dis.readFully(rpcBuf);<a name="line.241"></a>
-<span class="sourceLineNo">242</span><a name="line.242"></a>
-<span class="sourceLineNo">243</span>      // unwrap with Crypto AES<a name="line.243"></a>
-<span class="sourceLineNo">244</span>      rpcBuf = cryptoAES.unwrap(rpcBuf, 0, rpcBuf.length);<a name="line.244"></a>
-<span class="sourceLineNo">245</span>      if (LOG.isDebugEnabled()) {<a name="line.245"></a>
-<span class="sourceLineNo">246</span>        LOG.debug("unwrapping token of length:" + rpcBuf.length);<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      }<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      unwrappedRpcBuffer = ByteBuffer.wrap(rpcBuf);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    }<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  }<a name="line.250"></a>
-<span class="sourceLineNo">251</span><a name="line.251"></a>
-<span class="sourceLineNo">252</span>  /**<a name="line.252"></a>
-<span class="sourceLineNo">253</span>   * Get a SASL wrapped OutputStream. Can be called only after saslConnect() has been called.<a name="line.253"></a>
-<span class="sourceLineNo">254</span>   * @return a SASL wrapped OutputStream<a name="line.254"></a>
-<span class="sourceLineNo">255</span>   * @throws IOException<a name="line.255"></a>
-<span class="sourceLineNo">256</span>   */<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  public OutputStream getOutputStream() throws IOException {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    if (!saslClient.isComplete()) {<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      throw new IOException("Sasl authentication exchange hasn't completed yet");<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    }<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    // If Crypto AES is enabled, return cryptoOutputStream which wrap the data with Crypto AES.<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    if (cryptoAesEnable &amp;&amp; cryptoOutputStream != null) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      return cryptoOutputStream;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    return saslOutputStream;<a name="line.265"></a>
-<span class="sourceLineNo">266</span>  }<a name="line.266"></a>
-<span class="sourceLineNo">267</span><a name="line.267"></a>
-<span class="sourceLineNo">268</span>  class WrappedOutputStream extends FilterOutputStream {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    public WrappedOutputStream(OutputStream out) throws IOException {<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      super(out);<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    }<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    @Override<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    public void write(byte[] buf, int off, int len) throws IOException {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      if (LOG.isDebugEnabled()) {<a name="line.274"></a>
-<span class="sourceLineNo">275</span>        LOG.debug("wrapping token of length:" + len);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      }<a name="line.276"></a>
+<span class="sourceLineNo">155</span>      catch (IOException e){<a name="line.155"></a>
+<span class="sourceLineNo">156</span>        if(e instanceof RemoteException){<a name="line.156"></a>
+<span class="sourceLineNo">157</span>          LOG.debug("Sasl connection failed: ", e);<a name="line.157"></a>
+<span class="sourceLineNo">158</span>          throw e;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>        }<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      }<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      if (LOG.isDebugEnabled()) {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>        LOG.debug("SASL client context established. Negotiated QoP: "<a name="line.162"></a>
+<span class="sourceLineNo">163</span>            + saslClient.getNegotiatedProperty(Sasl.QOP));<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      }<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      // initial the inputStream, outputStream for both Sasl encryption<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      // and Crypto AES encryption if necessary<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      // if Crypto AES encryption enabled, the saslInputStream/saslOutputStream is<a name="line.167"></a>
+<span class="sourceLineNo">168</span>      // only responsible for connection header negotiation,<a name="line.168"></a>
+<span class="sourceLineNo">169</span>      // cryptoInputStream/cryptoOutputStream is responsible for rpc encryption with Crypto AES<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      saslInputStream = new SaslInputStream(inS, saslClient);<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      saslOutputStream = new SaslOutputStream(outS, saslClient);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>      if (initStreamForCrypto) {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>        cryptoInputStream = new WrappedInputStream(inS);<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        cryptoOutputStream = new WrappedOutputStream(outS);<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      }<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>      return true;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    } catch (IOException e) {<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      try {<a name="line.179"></a>
+<span class="sourceLineNo">180</span>        saslClient.dispose();<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      } catch (SaslException ignored) {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>        // ignore further exceptions during cleanup<a name="line.182"></a>
+<span class="sourceLineNo">183</span>      }<a name="line.183"></a>
+<span class="sourceLineNo">184</span>      throw e;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    }<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  public String getSaslQOP() {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    return (String) saslClient.getNegotiatedProperty(Sasl.QOP);<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  }<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>  public void initCryptoCipher(RPCProtos.CryptoCipherMeta cryptoCipherMeta,<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      Configuration conf) throws IOException {<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    // create SaslAES for client<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    cryptoAES = EncryptionUtil.createCryptoAES(cryptoCipherMeta, conf);<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    cryptoAesEnable = true;<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  }<a name="line.197"></a>
+<span class="sourceLineNo">198</span><a name="line.198"></a>
+<span class="sourceLineNo">199</span>  /**<a name="line.199"></a>
+<span class="sourceLineNo">200</span>   * Get a SASL wrapped InputStream. Can be called only after saslConnect() has been called.<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * @return a SASL wrapped InputStream<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @throws IOException<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   */<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  public InputStream getInputStream() throws IOException {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    if (!saslClient.isComplete()) {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      throw new IOException("Sasl authentication exchange hasn't completed yet");<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    }<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    // If Crypto AES is enabled, return cryptoInputStream which unwrap the data with Crypto AES.<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    if (cryptoAesEnable &amp;&amp; cryptoInputStream != null) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      return cryptoInputStream;<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    }<a name="line.211"></a>
+<span class="sourceLineNo">212</span>    return saslInputStream;<a name="line.212"></a>
+<span class="sourceLineNo">213</span>  }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>  class WrappedInputStream extends FilterInputStream {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    private ByteBuffer unwrappedRpcBuffer = ByteBuffer.allocate(0);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    public WrappedInputStream(InputStream in) throws IOException {<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      super(in);<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    }<a name="line.219"></a>
+<span class="sourceLineNo">220</span><a name="line.220"></a>
+<span class="sourceLineNo">221</span>    @Override<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    public int read() throws IOException {<a name="line.222"></a>
+<span class="sourceLineNo">223</span>      byte[] b = new byte[1];<a name="line.223"></a>
+<span class="sourceLineNo">224</span>      int n = read(b, 0, 1);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>      return (n != -1) ? b[0] : -1;<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    }<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>    @Override<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    public int read(byte b[]) throws IOException {<a name="line.229"></a>
+<span class="sourceLineNo">230</span>      return read(b, 0, b.length);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    }<a name="line.231"></a>
+<span class="sourceLineNo">232</span><a name="line.232"></a>
+<span class="sourceLineNo">233</span>    @Override<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    public synchronized int read(byte[] buf, int off, int len) throws IOException {<a name="line.234"></a>
+<span class="sourceLineNo">235</span>      // fill the buffer with the next RPC message<a name="line.235"></a>
+<span class="sourceLineNo">236</span>      if (unwrappedRpcBuffer.remaining() == 0) {<a name="line.236"></a>
+<span class="sourceLineNo">237</span>        readNextRpcPacket();<a name="line.237"></a>
+<span class="sourceLineNo">238</span>      }<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      // satisfy as much of the request as possible<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      int readLen = Math.min(len, unwrappedRpcBuffer.remaining());<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      unwrappedRpcBuffer.get(buf, off, readLen);<a name="line.241"></a>
+<span class="sourceLineNo">242</span>      return readLen;<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    }<a name="line.243"></a>
+<span class="sourceLineNo">244</span><a name="line.244"></a>
+<span class="sourceLineNo">245</span>    // unwrap messages with Crypto AES<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    private void readNextRpcPacket() throws IOException {<a name="line.246"></a>
+<span class="sourceLineNo">247</span>      LOG.debug("reading next wrapped RPC packet");<a name="line.247"></a>
+<span class="sourceLineNo">248</span>      DataInputStream dis = new DataInputStream(in);<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      int rpcLen = dis.readInt();<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      byte[] rpcBuf = new byte[rpcLen];<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      dis.readFully(rpcBuf);<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>      // unwrap with Crypto AES<a name="line.253"></a>
+<span class="sourceLineNo">254</span>      rpcBuf = cryptoAES.unwrap(rpcBuf, 0, rpcBuf.length);<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      if (LOG.isDebugEnabled()) {<a name="line.255"></a>
+<span class="sourceLineNo">256</span>        LOG.debug("unwrapping token of length:" + rpcBuf.length);<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      }<a name="line.257"></a>
+<span class="sourceLineNo">258</span>      unwrappedRpcBuffer = ByteBuffer.wrap(rpcBuf);<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    }<a name="line.259"></a>
+<span class="sourceLineNo">260</span>  }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>  /**<a name="line.262"></a>
+<span class="sourceLineNo">263</span>   * Get a SASL wrapped OutputStream. Can be called only after saslConnect() has been called.<a name="line.263"></a>
+<span class="sourceLineNo">264</span>   * @return a SASL wrapped OutputStream<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * @throws IOException<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   */<a name="line.266"></a>
+<span class="sourceLineNo">267</span>  public OutputStream getOutputStream() throws IOException {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    if (!saslClient.isComplete()) {<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      throw new IOException("Sasl authentication exchange hasn't completed yet");<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    }<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    // If Crypto AES is enabled, return cryptoOutputStream which wrap the data with Crypto AES.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    if (cryptoAesEnable &amp;&amp; cryptoOutputStream != null) {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>      return cryptoOutputStream;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    }<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    return saslOutputStream;<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  }<a name="line.276"></a>
 <span class="sourceLineNo">277</span><a name="line.277"></a>
-<span class="sourceLineNo">278</span>      // wrap with Crypto AES<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      byte[] wrapped = cryptoAES.wrap(buf, off, len);<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      DataOutputStream dob = new DataOutputStream(out);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      dob.writeInt(wrapped.length);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      dob.write(wrapped, 0, wrapped.length);<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      dob.flush();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    }<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  }<a name="line.285"></a>
-<span class="sourceLineNo">286</span>}<a name="line.286"></a>
+<span class="sourceLineNo">278</span>  class WrappedOutputStream extends FilterOutputStream {<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    public WrappedOutputStream(OutputStream out) throws IOException {<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      super(out);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>    }<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    @Override<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    public void write(byte[] buf, int off, int len) throws IOException {<a name="line.283"></a>
+<span class="sourceLineNo">284</span>      if (LOG.isDebugEnabled()) {<a name="line.284"></a>
+<span class="sourceLineNo">285</span>        LOG.debug("wrapping token of length:" + len);<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>      // wrap with Crypto AES<a name="line.288"></a>
+<span class="sourceLineNo">289</span>      byte[] wrapped = cryptoAES.wrap(buf, off, len);<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      DataOutputStream dob = new DataOutputStream(out);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      dob.writeInt(wrapped.length);<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      dob.write(wrapped, 0, wrapped.length);<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      dob.flush();<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    }<a name="line.294"></a>
+<span class="sourceLineNo">295</span>  }<a name="line.295"></a>
+<span class="sourceLineNo">296</span>}<a name="line.296"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html
index 95c33a9..f3392c7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/BoundedRecoveredHFilesOutputSink.html
@@ -6,7 +6,7 @@
 </head>
 <body>
 <div class="sourceContainer">
-<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a>
+<pre><span class="sourceLineNo">001</span>/*<a name="line.1"></a>
 <span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a>
 <span class="sourceLineNo">003</span> * or more contributor license agreements.  See the NOTICE file<a name="line.3"></a>
 <span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a>
@@ -26,220 +26,225 @@
 <span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.wal;<a name="line.18"></a>
 <span class="sourceLineNo">019</span><a name="line.19"></a>
 <span class="sourceLineNo">020</span>import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;<a name="line.20"></a>
-<span class="sourceLineNo">021</span><a name="line.21"></a>
-<span class="sourceLineNo">022</span>import java.io.IOException;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import java.io.InterruptedIOException;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import java.util.HashMap;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import java.util.List;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import java.util.Map;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import java.util.concurrent.ConcurrentMap;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.util.concurrent.ExecutionException;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import java.util.concurrent.Future;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.31"></a>
-<span class="sourceLineNo">032</span><a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.fs.Path;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.Cell;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.TableName;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.io.hfile.CacheConfig;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.io.hfile.HFileContext;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.regionserver.CellSet;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.regionserver.HStore;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.regionserver.StoreFileWriter;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.wal.EntryBuffers.RegionEntryBuffer;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.slf4j.Logger;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.slf4j.LoggerFactory;<a name="line.52"></a>
-<span class="sourceLineNo">053</span><a name="line.53"></a>
-<span class="sourceLineNo">054</span>@InterfaceAudience.Private<a name="line.54"></a>
-<span class="sourceLineNo">055</span>public class BoundedRecoveredHFilesOutputSink extends OutputSink {<a name="line.55"></a>
-<span class="sourceLineNo">056</span>  private static final Logger LOG = LoggerFactory.getLogger(BoundedRecoveredHFilesOutputSink.class);<a name="line.56"></a>
-<span class="sourceLineNo">057</span><a name="line.57"></a>
-<span class="sourceLineNo">058</span>  public static final String WAL_SPLIT_TO_HFILE = "hbase.wal.split.to.hfile";<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  public static final boolean DEFAULT_WAL_SPLIT_TO_HFILE = false;<a name="line.59"></a>
-<span class="sourceLineNo">060</span><a name="line.60"></a>
-<span class="sourceLineNo">061</span>  private final WALSplitter walSplitter;<a name="line.61"></a>
-<span class="sourceLineNo">062</span><a name="line.62"></a>
-<span class="sourceLineNo">063</span>  // Since the splitting process may create multiple output files, we need a map<a name="line.63"></a>
-<span class="sourceLineNo">064</span>  // to track the output count of each region.<a name="line.64"></a>
-<span class="sourceLineNo">065</span>  private ConcurrentMap&lt;String, Long&gt; regionEditsWrittenMap = new ConcurrentHashMap&lt;&gt;();<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  // Need a counter to track the opening writers.<a name="line.66"></a>
-<span class="sourceLineNo">067</span>  private final AtomicInteger openingWritersNum = new AtomicInteger(0);<a name="line.67"></a>
-<span class="sourceLineNo">068</span><a name="line.68"></a>
-<span class="sourceLineNo">069</span>  private final ConcurrentMap&lt;TableName, TableDescriptor&gt; tableDescCache;<a name="line.69"></a>
-<span class="sourceLineNo">070</span><a name="line.70"></a>
-<span class="sourceLineNo">071</span>  public BoundedRecoveredHFilesOutputSink(WALSplitter walSplitter,<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    WALSplitter.PipelineController controller, EntryBuffers entryBuffers, int numWriters) {<a name="line.72"></a>
-<span class="sourceLineNo">073</span>    super(controller, entryBuffers, numWriters);<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    this.walSplitter = walSplitter;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    this.tableDescCache = new ConcurrentHashMap&lt;&gt;();<a name="line.75"></a>
-<span class="sourceLineNo">076</span>  }<a name="line.76"></a>
-<span class="sourceLineNo">077</span><a name="line.77"></a>
-<span class="sourceLineNo">078</span>  @Override<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  void append(RegionEntryBuffer buffer) throws IOException {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>    Map&lt;String, CellSet&gt; familyCells = new HashMap&lt;&gt;();<a name="line.80"></a>
-<span class="sourceLineNo">081</span>    Map&lt;String, Long&gt; familySeqIds = new HashMap&lt;&gt;();<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    boolean isMetaTable = buffer.tableName.equals(META_TABLE_NAME);<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    for (WAL.Entry entry : buffer.entryBuffer) {<a name="line.83"></a>
-<span class="sourceLineNo">084</span>      long seqId = entry.getKey().getSequenceId();<a name="line.84"></a>
-<span class="sourceLineNo">085</span>      List&lt;Cell&gt; cells = entry.getEdit().getCells();<a name="line.85"></a>
-<span class="sourceLineNo">086</span>      for (Cell cell : cells) {<a name="line.86"></a>
-<span class="sourceLineNo">087</span>        if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) {<a name="line.87"></a>
-<span class="sourceLineNo">088</span>          continue;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>        }<a name="line.89"></a>
-<span class="sourceLineNo">090</span>        PrivateCellUtil.setSequenceId(cell, seqId);<a name="line.90"></a>
-<span class="sourceLineNo">091</span>        String familyName = Bytes.toString(CellUtil.cloneFamily(cell));<a name="line.91"></a>
-<span class="sourceLineNo">092</span>        // comparator need to be specified for meta<a name="line.92"></a>
-<span class="sourceLineNo">093</span>        familyCells<a name="line.93"></a>
-<span class="sourceLineNo">094</span>            .computeIfAbsent(familyName,<a name="line.94"></a>
-<span class="sourceLineNo">095</span>              key -&gt; new CellSet(<a name="line.95"></a>
-<span class="sourceLineNo">096</span>                  isMetaTable ? CellComparatorImpl.META_COMPARATOR : CellComparatorImpl.COMPARATOR))<a name="line.96"></a>
-<span class="sourceLineNo">097</span>            .add(cell);<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        familySeqIds.compute(familyName, (k, v) -&gt; v == null ? seqId : Math.max(v, seqId));<a name="line.98"></a>
-<span class="sourceLineNo">099</span>      }<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    }<a name="line.100"></a>
-<span class="sourceLineNo">101</span><a name="line.101"></a>
-<span class="sourceLineNo">102</span>    // The key point is create a new writer for each column family, write edits then close writer.<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    String regionName = Bytes.toString(buffer.encodedRegionName);<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    for (Map.Entry&lt;String, CellSet&gt; cellsEntry : familyCells.entrySet()) {<a name="line.104"></a>
-<span class="sourceLineNo">105</span>      String familyName = cellsEntry.getKey();<a name="line.105"></a>
-<span class="sourceLineNo">106</span>      StoreFileWriter writer = createRecoveredHFileWriter(buffer.tableName, regionName,<a name="line.106"></a>
-<span class="sourceLineNo">107</span>        familySeqIds.get(familyName), familyName, isMetaTable);<a name="line.107"></a>
-<span class="sourceLineNo">108</span>      openingWritersNum.incrementAndGet();<a name="line.108"></a>
-<span class="sourceLineNo">109</span>      try {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>        for (Cell cell : cellsEntry.getValue()) {<a name="line.110"></a>
-<span class="sourceLineNo">111</span>          writer.append(cell);<a name="line.111"></a>
-<span class="sourceLineNo">112</span>        }<a name="line.112"></a>
-<span class="sourceLineNo">113</span>        // Append the max seqid to hfile, used when recovery.<a name="line.113"></a>
-<span class="sourceLineNo">114</span>        writer.appendMetadata(familySeqIds.get(familyName), false);<a name="line.114"></a>
-<span class="sourceLineNo">115</span>        regionEditsWrittenMap.compute(Bytes.toString(buffer.encodedRegionName),<a name="line.115"></a>
-<span class="sourceLineNo">116</span>          (k, v) -&gt; v == null ? buffer.entryBuffer.size() : v + buffer.entryBuffer.size());<a name="line.116"></a>
-<span class="sourceLineNo">117</span>        splits.add(writer.getPath());<a name="line.117"></a>
-<span class="sourceLineNo">118</span>        openingWritersNum.decrementAndGet();<a name="line.118"></a>
-<span class="sourceLineNo">119</span>      } finally {<a name="line.119"></a>
-<span class="sourceLineNo">120</span>        writer.close();<a name="line.120"></a>
-<span class="sourceLineNo">121</span>      }<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    }<a name="line.122"></a>
-<span class="sourceLineNo">123</span>  }<a name="line.123"></a>
-<span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>  @Override<a name="line.125"></a>
-<span class="sourceLineNo">126</span>  public List&lt;Path&gt; close() throws IOException {<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    boolean isSuccessful = true;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    try {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>      isSuccessful &amp;= finishWriterThreads();<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    } finally {<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      isSuccessful &amp;= writeRemainingEntryBuffers();<a name="line.131"></a>
-<span class="sourceLineNo">132</span>    }<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    return isSuccessful ? splits : null;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  }<a name="line.134"></a>
-<span class="sourceLineNo">135</span><a name="line.135"></a>
-<span class="sourceLineNo">136</span>  /**<a name="line.136"></a>
-<span class="sourceLineNo">137</span>   * Write out the remaining RegionEntryBuffers and close the writers.<a name="line.137"></a>
-<span class="sourceLineNo">138</span>   *<a name="line.138"></a>
-<span class="sourceLineNo">139</span>   * @return true when there is no error.<a name="line.139"></a>
-<span class="sourceLineNo">140</span>   */<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  private boolean writeRemainingEntryBuffers() throws IOException {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    for (EntryBuffers.RegionEntryBuffer buffer : entryBuffers.buffers.values()) {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      closeCompletionService.submit(() -&gt; {<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        append(buffer);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>        return null;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      });<a name="line.146"></a>
-<span class="sourceLineNo">147</span>    }<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    boolean progressFailed = false;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    try {<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      for (int i = 0, n = entryBuffers.buffers.size(); i &lt; n; i++) {<a name="line.150"></a>
-<span class="sourceLineNo">151</span>        Future&lt;Void&gt; future = closeCompletionService.take();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>        future.get();<a name="line.152"></a>
-<span class="sourceLineNo">153</span>        if (!progressFailed &amp;&amp; reporter != null &amp;&amp; !reporter.progress()) {<a name="line.153"></a>
-<span class="sourceLineNo">154</span>          progressFailed = true;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>        }<a name="line.155"></a>
-<span class="sourceLineNo">156</span>      }<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    } catch (InterruptedException e) {<a name="line.157"></a>
-<span class="sourceLineNo">158</span>      IOException iie = new InterruptedIOException();<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      iie.initCause(e);<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      throw iie;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    } catch (ExecutionException e) {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      throw new IOException(e.getCause());<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    } finally {<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      closeThreadPool.shutdownNow();<a name="line.164"></a>
-<span class="sourceLineNo">165</span>    }<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    return !progressFailed;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  }<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>  @Override<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  public Map&lt;String, Long&gt; getOutputCounts() {<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    return regionEditsWrittenMap;<a name="line.171"></a>
+<span class="sourceLineNo">021</span>import java.io.IOException;<a name="line.21"></a>
+<span class="sourceLineNo">022</span>import java.io.InterruptedIOException;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import java.util.HashMap;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import java.util.List;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import java.util.Map;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import java.util.concurrent.ConcurrentMap;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import java.util.concurrent.ExecutionException;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import java.util.concurrent.Future;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.fs.Path;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.Cell;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.TableName;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.io.hfile.CacheConfig;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.io.hfile.HFileContext;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.regionserver.CellSet;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.regionserver.HStore;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.regionserver.StoreFileWriter;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.wal.EntryBuffers.RegionEntryBuffer;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.slf4j.Logger;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.slf4j.LoggerFactory;<a name="line.50"></a>
+<span class="sourceLineNo">051</span><a name="line.51"></a>
+<span class="sourceLineNo">052</span>/**<a name="line.52"></a>
+<span class="sourceLineNo">053</span> * A WALSplitter sink that outputs {@link org.apache.hadoop.hbase.io.hfile.HFile}s.<a name="line.53"></a>
+<span class="sourceLineNo">054</span> * Runs with a bounded number of HFile writers at any one time rather than let the count run up.<a name="line.54"></a>
+<span class="sourceLineNo">055</span> * @see BoundedRecoveredEditsOutputSink for a sink implementation that writes intermediate<a name="line.55"></a>
+<span class="sourceLineNo">056</span> *   recovered.edits files.<a name="line.56"></a>
+<span class="sourceLineNo">057</span> */<a name="line.57"></a>
+<span class="sourceLineNo">058</span>@InterfaceAudience.Private<a name="line.58"></a>
+<span class="sourceLineNo">059</span>public class BoundedRecoveredHFilesOutputSink extends OutputSink {<a name="line.59"></a>
+<span class="sourceLineNo">060</span>  private static final Logger LOG = LoggerFactory.getLogger(BoundedRecoveredHFilesOutputSink.class);<a name="line.60"></a>
+<span class="sourceLineNo">061</span><a name="line.61"></a>
+<span class="sourceLineNo">062</span>  private final WALSplitter walSplitter;<a name="line.62"></a>
+<span class="sourceLineNo">063</span><a name="line.63"></a>
+<span class="sourceLineNo">064</span>  // Since the splitting process may create multiple output files, we need a map<a name="line.64"></a>
+<span class="sourceLineNo">065</span>  // to track the output count of each region.<a name="line.65"></a>
+<span class="sourceLineNo">066</span>  private ConcurrentMap&lt;String, Long&gt; regionEditsWrittenMap = new ConcurrentHashMap&lt;&gt;();<a name="line.66"></a>
+<span class="sourceLineNo">067</span>  // Need a counter to track the opening writers.<a name="line.67"></a>
+<span class="sourceLineNo">068</span>  private final AtomicInteger openingWritersNum = new AtomicInteger(0);<a name="line.68"></a>
+<span class="sourceLineNo">069</span><a name="line.69"></a>
+<span class="sourceLineNo">070</span>  private final ConcurrentMap&lt;TableName, TableDescriptor&gt; tableDescCache;<a name="line.70"></a>
+<span class="sourceLineNo">071</span><a name="line.71"></a>
+<span class="sourceLineNo">072</span>  public BoundedRecoveredHFilesOutputSink(WALSplitter walSplitter,<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    WALSplitter.PipelineController controller, EntryBuffers entryBuffers, int numWriters) {<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    super(controller, entryBuffers, numWriters);<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    this.walSplitter = walSplitter;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    this.tableDescCache = new ConcurrentHashMap&lt;&gt;();<a name="line.76"></a>
+<span class="sourceLineNo">077</span>  }<a name="line.77"></a>
+<span class="sourceLineNo">078</span><a name="line.78"></a>
+<span class="sourceLineNo">079</span>  @Override<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  void append(RegionEntryBuffer buffer) throws IOException {<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    Map&lt;String, CellSet&gt; familyCells = new HashMap&lt;&gt;();<a name="line.81"></a>
+<span class="sourceLineNo">082</span>    Map&lt;String, Long&gt; familySeqIds = new HashMap&lt;&gt;();<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    boolean isMetaTable = buffer.tableName.equals(META_TABLE_NAME);<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    // First iterate all Cells to find which column families are present and to stamp Cell with<a name="line.84"></a>
+<span class="sourceLineNo">085</span>    // sequence id.<a name="line.85"></a>
+<span class="sourceLineNo">086</span>    for (WAL.Entry entry : buffer.entryBuffer) {<a name="line.86"></a>
+<span class="sourceLineNo">087</span>      long seqId = entry.getKey().getSequenceId();<a name="line.87"></a>
+<span class="sourceLineNo">088</span>      List&lt;Cell&gt; cells = entry.getEdit().getCells();<a name="line.88"></a>
+<span class="sourceLineNo">089</span>      for (Cell cell : cells) {<a name="line.89"></a>
+<span class="sourceLineNo">090</span>        if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) {<a name="line.90"></a>
+<span class="sourceLineNo">091</span>          continue;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>        }<a name="line.92"></a>
+<span class="sourceLineNo">093</span>        PrivateCellUtil.setSequenceId(cell, seqId);<a name="line.93"></a>
+<span class="sourceLineNo">094</span>        String familyName = Bytes.toString(CellUtil.cloneFamily(cell));<a name="line.94"></a>
+<span class="sourceLineNo">095</span>        // comparator need to be specified for meta<a name="line.95"></a>
+<span class="sourceLineNo">096</span>        familyCells<a name="line.96"></a>
+<span class="sourceLineNo">097</span>            .computeIfAbsent(familyName,<a name="line.97"></a>
+<span class="sourceLineNo">098</span>              key -&gt; new CellSet(<a name="line.98"></a>
+<span class="sourceLineNo">099</span>                  isMetaTable ? CellComparatorImpl.META_COMPARATOR : CellComparatorImpl.COMPARATOR))<a name="line.99"></a>
+<span class="sourceLineNo">100</span>            .add(cell);<a name="line.100"></a>
+<span class="sourceLineNo">101</span>        familySeqIds.compute(familyName, (k, v) -&gt; v == null ? seqId : Math.max(v, seqId));<a name="line.101"></a>
+<span class="sourceLineNo">102</span>      }<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    }<a name="line.103"></a>
+<span class="sourceLineNo">104</span><a name="line.104"></a>
+<span class="sourceLineNo">105</span>    // Create a new hfile writer for each column family, write edits then close writer.<a name="line.105"></a>
+<span class="sourceLineNo">106</span>    String regionName = Bytes.toString(buffer.encodedRegionName);<a name="line.106"></a>
+<span class="sourceLineNo">107</span>    for (Map.Entry&lt;String, CellSet&gt; cellsEntry : familyCells.entrySet()) {<a name="line.107"></a>
+<span class="sourceLineNo">108</span>      String familyName = cellsEntry.getKey();<a name="line.108"></a>
+<span class="sourceLineNo">109</span>      StoreFileWriter writer = createRecoveredHFileWriter(buffer.tableName, regionName,<a name="line.109"></a>
+<span class="sourceLineNo">110</span>        familySeqIds.get(familyName), familyName, isMetaTable);<a name="line.110"></a>
+<span class="sourceLineNo">111</span>      LOG.trace("Created {}", writer.getPath());<a name="line.111"></a>
+<span class="sourceLineNo">112</span>      openingWritersNum.incrementAndGet();<a name="line.112"></a>
+<span class="sourceLineNo">113</span>      try {<a name="line.113"></a>
+<span class="sourceLineNo">114</span>        for (Cell cell : cellsEntry.getValue()) {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>          writer.append(cell);<a name="line.115"></a>
+<span class="sourceLineNo">116</span>        }<a name="line.116"></a>
+<span class="sourceLineNo">117</span>        // Append the max seqid to hfile, used when recovery.<a name="line.117"></a>
+<span class="sourceLineNo">118</span>        writer.appendMetadata(familySeqIds.get(familyName), false);<a name="line.118"></a>
+<span class="sourceLineNo">119</span>        regionEditsWrittenMap.compute(Bytes.toString(buffer.encodedRegionName),<a name="line.119"></a>
+<span class="sourceLineNo">120</span>          (k, v) -&gt; v == null ? buffer.entryBuffer.size() : v + buffer.entryBuffer.size());<a name="line.120"></a>
+<span class="sourceLineNo">121</span>        splits.add(writer.getPath());<a name="line.121"></a>
+<span class="sourceLineNo">122</span>        openingWritersNum.decrementAndGet();<a name="line.122"></a>
+<span class="sourceLineNo">123</span>      } finally {<a name="line.123"></a>
+<span class="sourceLineNo">124</span>        writer.close();<a name="line.124"></a>
+<span class="sourceLineNo">125</span>        LOG.trace("Closed {}, edits={}", writer.getPath(), familyCells.size());<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      }<a name="line.126"></a>
+<span class="sourceLineNo">127</span>    }<a name="line.127"></a>
+<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
+<span class="sourceLineNo">129</span><a name="line.129"></a>
+<span class="sourceLineNo">130</span>  @Override<a name="line.130"></a>
+<span class="sourceLineNo">131</span>  public List&lt;Path&gt; close() throws IOException {<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    boolean isSuccessful = true;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    try {<a name="line.133"></a>
+<span class="sourceLineNo">134</span>      isSuccessful &amp;= finishWriterThreads();<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    } finally {<a name="line.135"></a>
+<span class="sourceLineNo">136</span>      isSuccessful &amp;= writeRemainingEntryBuffers();<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    }<a name="line.137"></a>
+<span class="sourceLineNo">138</span>    return isSuccessful ? splits : null;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>  }<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>  /**<a name="line.141"></a>
+<span class="sourceLineNo">142</span>   * Write out the remaining RegionEntryBuffers and close the writers.<a name="line.142"></a>
+<span class="sourceLineNo">143</span>   *<a name="line.143"></a>
+<span class="sourceLineNo">144</span>   * @return true when there is no error.<a name="line.144"></a>
+<span class="sourceLineNo">145</span>   */<a name="line.145"></a>
+<span class="sourceLineNo">146</span>  private boolean writeRemainingEntryBuffers() throws IOException {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    for (EntryBuffers.RegionEntryBuffer buffer : entryBuffers.buffers.values()) {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      closeCompletionService.submit(() -&gt; {<a name="line.148"></a>
+<span class="sourceLineNo">149</span>        append(buffer);<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        return null;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      });<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    }<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    boolean progressFailed = false;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    try {<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      for (int i = 0, n = entryBuffers.buffers.size(); i &lt; n; i++) {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>        Future&lt;Void&gt; future = closeCompletionService.take();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>        future.get();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>        if (!progressFailed &amp;&amp; reporter != null &amp;&amp; !reporter.progress()) {<a name="line.158"></a>
+<span class="sourceLineNo">159</span>          progressFailed = true;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>        }<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      }<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    } catch (InterruptedException e) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      IOException iie = new InterruptedIOException();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      iie.initCause(e);<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      throw iie;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    } catch (ExecutionException e) {<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      throw new IOException(e.getCause());<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    } finally {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>      closeThreadPool.shutdownNow();<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    }<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    return !progressFailed;<a name="line.171"></a>
 <span class="sourceLineNo">172</span>  }<a name="line.172"></a>
 <span class="sourceLineNo">173</span><a name="line.173"></a>
 <span class="sourceLineNo">174</span>  @Override<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  public int getNumberOfRecoveredRegions() {<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    return regionEditsWrittenMap.size();<a name="line.176"></a>
+<span class="sourceLineNo">175</span>  public Map&lt;String, Long&gt; getOutputCounts() {<a name="line.175"></a>
+<span class="sourceLineNo">176</span>    return regionEditsWrittenMap;<a name="line.176"></a>
 <span class="sourceLineNo">177</span>  }<a name="line.177"></a>
 <span class="sourceLineNo">178</span><a name="line.178"></a>
 <span class="sourceLineNo">179</span>  @Override<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  int getNumOpenWriters() {<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    return openingWritersNum.get();<a name="line.181"></a>
+<span class="sourceLineNo">180</span>  public int getNumberOfRecoveredRegions() {<a name="line.180"></a>
+<span class="sourceLineNo">181</span>    return regionEditsWrittenMap.size();<a name="line.181"></a>
 <span class="sourceLineNo">182</span>  }<a name="line.182"></a>
 <span class="sourceLineNo">183</span><a name="line.183"></a>
 <span class="sourceLineNo">184</span>  @Override<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  boolean keepRegionEvent(Entry entry) {<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    return false;<a name="line.186"></a>
+<span class="sourceLineNo">185</span>  int getNumOpenWriters() {<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    return openingWritersNum.get();<a name="line.186"></a>
 <span class="sourceLineNo">187</span>  }<a name="line.187"></a>
 <span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>  private StoreFileWriter createRecoveredHFileWriter(TableName tableName, String regionName,<a name="line.189"></a>
-<span class="sourceLineNo">190</span>      long seqId, String familyName, boolean isMetaTable) throws IOException {<a name="line.190"></a>
-<span class="sourceLineNo">191</span>    Path outputDir = WALSplitUtil.tryCreateRecoveredHFilesDir(walSplitter.rootFS, walSplitter.conf,<a name="line.191"></a>
-<span class="sourceLineNo">192</span>      tableName, regionName, familyName);<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    StoreFileWriter.Builder writerBuilder =<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        new StoreFileWriter.Builder(walSplitter.conf, CacheConfig.DISABLED, walSplitter.rootFS)<a name="line.194"></a>
-<span class="sourceLineNo">195</span>            .withOutputDir(outputDir);<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>    TableDescriptor tableDesc =<a name="line.197"></a>
-<span class="sourceLineNo">198</span>        tableDescCache.computeIfAbsent(tableName, t -&gt; getTableDescriptor(t));<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    if (tableDesc == null) {<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      throw new IOException("Failed to get table descriptor for table " + tableName);<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    }<a name="line.201"></a>
-<span class="sourceLineNo">202</span>    ColumnFamilyDescriptor cfd = tableDesc.getColumnFamily(Bytes.toBytesBinary(familyName));<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    HFileContext hFileContext = createFileContext(cfd, isMetaTable);<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    return writerBuilder.withFileContext(hFileContext).withBloomType(cfd.getBloomFilterType())<a name="line.204"></a>
-<span class="sourceLineNo">205</span>        .build();<a name="line.205"></a>
-<span class="sourceLineNo">206</span>  }<a name="line.206"></a>
-<span class="sourceLineNo">207</span><a name="line.207"></a>
-<span class="sourceLineNo">208</span>  private HFileContext createFileContext(ColumnFamilyDescriptor cfd, boolean isMetaTable)<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      throws IOException {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>    return new HFileContextBuilder().withCompression(cfd.getCompressionType())<a name="line.210"></a>
-<span class="sourceLineNo">211</span>        .withChecksumType(HStore.getChecksumType(walSplitter.conf))<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        .withBytesPerCheckSum(HStore.getBytesPerChecksum(walSplitter.conf))<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        .withBlockSize(cfd.getBlocksize()).withCompressTags(cfd.isCompressTags())<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        .withDataBlockEncoding(cfd.getDataBlockEncoding()).withCellComparator(<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          isMetaTable ? CellComparatorImpl.META_COMPARATOR : CellComparatorImpl.COMPARATOR)<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        .build();<a name="line.216"></a>
-<span class="sourceLineNo">217</span>  }<a name="line.217"></a>
-<span class="sourceLineNo">218</span><a name="line.218"></a>
-<span class="sourceLineNo">219</span>  private TableDescriptor getTableDescriptor(TableName tableName) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>    if (walSplitter.rsServices != null) {<a name="line.220"></a>
-<span class="sourceLineNo">221</span>      try {<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        return walSplitter.rsServices.getConnection().getAdmin().getDescriptor(tableName);<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      } catch (IOException e) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>        LOG.warn("Failed to get table descriptor for table {}", tableName, e);<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      }<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    }<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    try {<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      return walSplitter.tableDescriptors.get(tableName);<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    } catch (IOException e) {<a name="line.229"></a>
-<span class="sourceLineNo">230</span>      LOG.warn("Failed to get table descriptor for table {}", tableName, e);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      return null;<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    }<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  }<a name="line.233"></a>
-<span class="sourceLineNo">234</span>}<a name="line.234"></a>
+<span class="sourceLineNo">189</span>  @Override<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  boolean keepRegionEvent(Entry entry) {<a name="line.190"></a>
+<span class="sourceLineNo">191</span>    return false;<a name="line.191"></a>
+<span class="sourceLineNo">192</span>  }<a name="line.192"></a>
+<span class="sourceLineNo">193</span><a name="line.193"></a>
+<span class="sourceLineNo">194</span>  private StoreFileWriter createRecoveredHFileWriter(TableName tableName, String regionName,<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      long seqId, String familyName, boolean isMetaTable) throws IOException {<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    Path outputDir = WALSplitUtil.tryCreateRecoveredHFilesDir(walSplitter.rootFS, walSplitter.conf,<a name="line.196"></a>
+<span class="sourceLineNo">197</span>      tableName, regionName, familyName);<a name="line.197"></a>
+<span class="sourceLineNo">198</span>    StoreFileWriter.Builder writerBuilder =<a name="line.198"></a>
+<span class="sourceLineNo">199</span>        new StoreFileWriter.Builder(walSplitter.conf, CacheConfig.DISABLED, walSplitter.rootFS)<a name="line.199"></a>
+<span class="sourceLineNo">200</span>            .withOutputDir(outputDir);<a name="line.200"></a>
+<span class="sourceLineNo">201</span><a name="line.201"></a>
+<span class="sourceLineNo">202</span>    TableDescriptor tableDesc =<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        tableDescCache.computeIfAbsent(tableName, t -&gt; getTableDescriptor(t));<a name="line.203"></a>
+<span class="sourceLineNo">204</span>    if (tableDesc == null) {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>      throw new IOException("Failed to get table descriptor for table " + tableName);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>    }<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    ColumnFamilyDescriptor cfd = tableDesc.getColumnFamily(Bytes.toBytesBinary(familyName));<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    HFileContext hFileContext = createFileContext(cfd, isMetaTable);<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    return writerBuilder.withFileContext(hFileContext).withBloomType(cfd.getBloomFilterType())<a name="line.209"></a>
+<span class="sourceLineNo">210</span>        .build();<a name="line.210"></a>
+<span class="sourceLineNo">211</span>  }<a name="line.211"></a>
+<span class="sourceLineNo">212</span><a name="line.212"></a>
+<span class="sourceLineNo">213</span>  private HFileContext createFileContext(ColumnFamilyDescriptor cfd, boolean isMetaTable)<a name="line.213"></a>
+<span class="sourceLineNo">214</span>      throws IOException {<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    return new HFileContextBuilder().withCompression(cfd.getCompressionType())<a name="line.215"></a>
+<span class="sourceLineNo">216</span>        .withChecksumType(HStore.getChecksumType(walSplitter.conf))<a name="line.216"></a>
+<span class="sourceLineNo">217</span>        .withBytesPerCheckSum(HStore.getBytesPerChecksum(walSplitter.conf))<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        .withBlockSize(cfd.getBlocksize()).withCompressTags(cfd.isCompressTags())<a name="line.218"></a>
+<span class="sourceLineNo">219</span>        .withDataBlockEncoding(cfd.getDataBlockEncoding()).withCellComparator(<a name="line.219"></a>
+<span class="sourceLineNo">220</span>          isMetaTable ? CellComparatorImpl.META_COMPARATOR : CellComparatorImpl.COMPARATOR)<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        .build();<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  }<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>  private TableDescriptor getTableDescriptor(TableName tableName) {<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    if (walSplitter.rsServices != null) {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>      try {<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        return walSplitter.rsServices.getConnection().getAdmin().getDescriptor(tableName);<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      } catch (IOException e) {<a name="line.228"></a>
+<span class="sourceLineNo">229</span>        LOG.warn("Failed to get table descriptor for table {}", tableName, e);<a name="line.229"></a>
+<span class="sourceLineNo">230</span>      }<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    }<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    try {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      return walSplitter.tableDescriptors.get(tableName);<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    } catch (IOException e) {<a name="line.234"></a>
+<span class="sourceLineNo">235</span>      LOG.warn("Failed to get table descriptor for table {}", tableName, e);<a name="line.235"></a>
+<span class="sourceLineNo">236</span>      return null;<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    }<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  }<a name="line.238"></a>
+<span class="sourceLineNo">239</span>}<a name="line.239"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html
index 3cbebfe..7156dee 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/OutputSink.WriterThread.html
@@ -64,167 +64,170 @@
 <span class="sourceLineNo">056</span><a name="line.56"></a>
 <span class="sourceLineNo">057</span>  protected final AtomicLong totalSkippedEdits = new AtomicLong();<a name="line.57"></a>
 <span class="sourceLineNo">058</span><a name="line.58"></a>
-<span class="sourceLineNo">059</span>  protected final List&lt;Path&gt; splits = new ArrayList&lt;&gt;();<a name="line.59"></a>
-<span class="sourceLineNo">060</span><a name="line.60"></a>
-<span class="sourceLineNo">061</span>  /**<a name="line.61"></a>
-<span class="sourceLineNo">062</span>   * Used when close this output sink.<a name="line.62"></a>
-<span class="sourceLineNo">063</span>   */<a name="line.63"></a>
-<span class="sourceLineNo">064</span>  protected final ThreadPoolExecutor closeThreadPool;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>  protected final CompletionService&lt;Void&gt; closeCompletionService;<a name="line.65"></a>
-<span class="sourceLineNo">066</span><a name="line.66"></a>
-<span class="sourceLineNo">067</span>  public OutputSink(WALSplitter.PipelineController controller, EntryBuffers entryBuffers,<a name="line.67"></a>
-<span class="sourceLineNo">068</span>      int numWriters) {<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    this.numThreads = numWriters;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    this.controller = controller;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>    this.entryBuffers = entryBuffers;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    this.closeThreadPool = Threads.getBoundedCachedThreadPool(numThreads, 30L, TimeUnit.SECONDS,<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        Threads.newDaemonThreadFactory("split-log-closeStream-"));<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    this.closeCompletionService = new ExecutorCompletionService&lt;&gt;(closeThreadPool);<a name="line.74"></a>
-<span class="sourceLineNo">075</span>  }<a name="line.75"></a>
-<span class="sourceLineNo">076</span><a name="line.76"></a>
-<span class="sourceLineNo">077</span>  void setReporter(CancelableProgressable reporter) {<a name="line.77"></a>
-<span class="sourceLineNo">078</span>    this.reporter = reporter;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  }<a name="line.79"></a>
-<span class="sourceLineNo">080</span><a name="line.80"></a>
-<span class="sourceLineNo">081</span>  /**<a name="line.81"></a>
-<span class="sourceLineNo">082</span>   * Start the threads that will pump data from the entryBuffers to the output files.<a name="line.82"></a>
-<span class="sourceLineNo">083</span>   */<a name="line.83"></a>
-<span class="sourceLineNo">084</span>  void startWriterThreads() throws IOException {<a name="line.84"></a>
-<span class="sourceLineNo">085</span>    for (int i = 0; i &lt; numThreads; i++) {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>      WriterThread t = new WriterThread(controller, entryBuffers, this, i);<a name="line.86"></a>
-<span class="sourceLineNo">087</span>      t.start();<a name="line.87"></a>
-<span class="sourceLineNo">088</span>      writerThreads.add(t);<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    }<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  }<a name="line.90"></a>
-<span class="sourceLineNo">091</span><a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /**<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * Wait for writer threads to dump all info to the sink<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   *<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   * @return true when there is no error<a name="line.95"></a>
-<span class="sourceLineNo">096</span>   */<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  boolean finishWriterThreads() throws IOException {<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    LOG.debug("Waiting for split writer threads to finish");<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    boolean progressFailed = false;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    for (WriterThread t : writerThreads) {<a name="line.100"></a>
-<span class="sourceLineNo">101</span>      t.finish();<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    }<a name="line.102"></a>
-<span class="sourceLineNo">103</span><a name="line.103"></a>
-<span class="sourceLineNo">104</span>    for (WriterThread t : writerThreads) {<a name="line.104"></a>
-<span class="sourceLineNo">105</span>      if (!progressFailed &amp;&amp; reporter != null &amp;&amp; !reporter.progress()) {<a name="line.105"></a>
-<span class="sourceLineNo">106</span>        progressFailed = true;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>      }<a name="line.107"></a>
-<span class="sourceLineNo">108</span>      try {<a name="line.108"></a>
-<span class="sourceLineNo">109</span>        t.join();<a name="line.109"></a>
-<span class="sourceLineNo">110</span>      } catch (InterruptedException ie) {<a name="line.110"></a>
-<span class="sourceLineNo">111</span>        IOException iie = new InterruptedIOException();<a name="line.111"></a>
-<span class="sourceLineNo">112</span>        iie.initCause(ie);<a name="line.112"></a>
-<span class="sourceLineNo">113</span>        throw iie;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      }<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    }<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    controller.checkForErrors();<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    LOG.info("{} split writer threads finished", this.writerThreads.size());<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    return (!progressFailed);<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  }<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>  long getTotalSkippedEdits() {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    return this.totalSkippedEdits.get();<a name="line.122"></a>
-<span class="sourceLineNo">123</span>  }<a name="line.123"></a>
-<span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>  /**<a name="line.125"></a>
-<span class="sourceLineNo">126</span>   * @return the number of currently opened writers<a name="line.126"></a>
-<span class="sourceLineNo">127</span>   */<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  abstract int getNumOpenWriters();<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   * @param buffer A buffer of some number of edits for a given region.<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   */<a name="line.132"></a>
-<span class="sourceLineNo">133</span>  abstract void append(EntryBuffers.RegionEntryBuffer buffer) throws IOException;<a name="line.133"></a>
-<span class="sourceLineNo">134</span><a name="line.134"></a>
-<span class="sourceLineNo">135</span>  abstract List&lt;Path&gt; close() throws IOException;<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>  /**<a name="line.137"></a>
-<span class="sourceLineNo">138</span>   * @return a map from encoded region ID to the number of edits written out for that region.<a name="line.138"></a>
-<span class="sourceLineNo">139</span>   */<a name="line.139"></a>
-<span class="sourceLineNo">140</span>  abstract Map&lt;String, Long&gt; getOutputCounts();<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>  /**<a name="line.142"></a>
-<span class="sourceLineNo">143</span>   * @return number of regions we've recovered<a name="line.143"></a>
-<span class="sourceLineNo">144</span>   */<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  abstract int getNumberOfRecoveredRegions();<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  /**<a name="line.147"></a>
-<span class="sourceLineNo">148</span>   * Some WALEdit's contain only KV's for account on what happened to a region. Not all sinks will<a name="line.148"></a>
-<span class="sourceLineNo">149</span>   * want to get all of those edits.<a name="line.149"></a>
-<span class="sourceLineNo">150</span>   * @return Return true if this sink wants to accept this region-level WALEdit.<a name="line.150"></a>
-<span class="sourceLineNo">151</span>   */<a name="line.151"></a>
-<span class="sourceLineNo">152</span>  abstract boolean keepRegionEvent(WAL.Entry entry);<a name="line.152"></a>
-<span class="sourceLineNo">153</span><a name="line.153"></a>
-<span class="sourceLineNo">154</span>  public static class WriterThread extends Thread {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    private volatile boolean shouldStop = false;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    private WALSplitter.PipelineController controller;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    private EntryBuffers entryBuffers;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    private OutputSink outputSink = null;<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>    WriterThread(WALSplitter.PipelineController controller, EntryBuffers entryBuffers,<a name="line.160"></a>
-<span class="sourceLineNo">161</span>        OutputSink sink, int i) {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      super(Thread.currentThread().getName() + "-Writer-" + i);<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      this.controller = controller;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      this.entryBuffers = entryBuffers;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      outputSink = sink;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
-<span class="sourceLineNo">167</span><a name="line.167"></a>
-<span class="sourceLineNo">168</span>    @Override<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    public void run()  {<a name="line.169"></a>
-<span class="sourceLineNo">170</span>      try {<a name="line.170"></a>
-<span class="sourceLineNo">171</span>        doRun();<a name="line.171"></a>
-<span class="sourceLineNo">172</span>      } catch (Throwable t) {<a name="line.172"></a>
-<span class="sourceLineNo">173</span>        LOG.error("Exiting thread", t);<a name="line.173"></a>
-<span class="sourceLineNo">174</span>        controller.writerThreadError(t);<a name="line.174"></a>
-<span class="sourceLineNo">175</span>      }<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    }<a name="line.176"></a>
-<span class="sourceLineNo">177</span><a name="line.177"></a>
-<span class="sourceLineNo">178</span>    private void doRun() throws IOException {<a name="line.178"></a>
-<span class="sourceLineNo">179</span>      LOG.trace("Writer thread starting");<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      while (true) {<a name="line.180"></a>
-<span class="sourceLineNo">181</span>        EntryBuffers.RegionEntryBuffer buffer = entryBuffers.getChunkToWrite();<a name="line.181"></a>
-<span class="sourceLineNo">182</span>        if (buffer == null) {<a name="line.182"></a>
-<span class="sourceLineNo">183</span>          // No data currently available, wait on some more to show up<a name="line.183"></a>
-<span class="sourceLineNo">184</span>          synchronized (controller.dataAvailable) {<a name="line.184"></a>
-<span class="sourceLineNo">185</span>            if (shouldStop) {<a name="line.185"></a>
-<span class="sourceLineNo">186</span>              return;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>            }<a name="line.187"></a>
-<span class="sourceLineNo">188</span>            try {<a name="line.188"></a>
-<span class="sourceLineNo">189</span>              controller.dataAvailable.wait(500);<a name="line.189"></a>
-<span class="sourceLineNo">190</span>            } catch (InterruptedException ie) {<a name="line.190"></a>
-<span class="sourceLineNo">191</span>              if (!shouldStop) {<a name="line.191"></a>
-<span class="sourceLineNo">192</span>                throw new RuntimeException(ie);<a name="line.192"></a>
-<span class="sourceLineNo">193</span>              }<a name="line.193"></a>
-<span class="sourceLineNo">194</span>            }<a name="line.194"></a>
-<span class="sourceLineNo">195</span>          }<a name="line.195"></a>
-<span class="sourceLineNo">196</span>          continue;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>        }<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>        assert buffer != null;<a name="line.199"></a>
-<span class="sourceLineNo">200</span>        try {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>          writeBuffer(buffer);<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        } finally {<a name="line.202"></a>
-<span class="sourceLineNo">203</span>          entryBuffers.doneWriting(buffer);<a name="line.203"></a>
-<span class="sourceLineNo">204</span>        }<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      }<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    }<a name="line.206"></a>
-<span class="sourceLineNo">207</span><a name="line.207"></a>
-<span class="sourceLineNo">208</span>    private void writeBuffer(EntryBuffers.RegionEntryBuffer buffer) throws IOException {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      outputSink.append(buffer);<a name="line.209"></a>
-<span class="sourceLineNo">210</span>    }<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>    private void finish() {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      synchronized (controller.dataAvailable) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        shouldStop = true;<a name="line.214"></a>
-<span class="sourceLineNo">215</span>        controller.dataAvailable.notifyAll();<a name="line.215"></a>
-<span class="sourceLineNo">216</span>      }<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    }<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  }<a name="line.218"></a>
-<span class="sourceLineNo">219</span>}<a name="line.219"></a>
+<span class="sourceLineNo">059</span>  /**<a name="line.59"></a>
+<span class="sourceLineNo">060</span>   * List of all the files produced by this sink<a name="line.60"></a>
+<span class="sourceLineNo">061</span>   */<a name="line.61"></a>
+<span class="sourceLineNo">062</span>  protected final List&lt;Path&gt; splits = new ArrayList&lt;&gt;();<a name="line.62"></a>
+<span class="sourceLineNo">063</span><a name="line.63"></a>
+<span class="sourceLineNo">064</span>  /**<a name="line.64"></a>
+<span class="sourceLineNo">065</span>   * Used when close this output sink.<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   */<a name="line.66"></a>
+<span class="sourceLineNo">067</span>  protected final ThreadPoolExecutor closeThreadPool;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>  protected final CompletionService&lt;Void&gt; closeCompletionService;<a name="line.68"></a>
+<span class="sourceLineNo">069</span><a name="line.69"></a>
+<span class="sourceLineNo">070</span>  public OutputSink(WALSplitter.PipelineController controller, EntryBuffers entryBuffers,<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      int numWriters) {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>    this.numThreads = numWriters;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    this.controller = controller;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    this.entryBuffers = entryBuffers;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    this.closeThreadPool = Threads.getBoundedCachedThreadPool(numThreads, 30L, TimeUnit.SECONDS,<a name="line.75"></a>
+<span class="sourceLineNo">076</span>        Threads.newDaemonThreadFactory("split-log-closeStream-"));<a name="line.76"></a>
+<span class="sourceLineNo">077</span>    this.closeCompletionService = new ExecutorCompletionService&lt;&gt;(closeThreadPool);<a name="line.77"></a>
+<span class="sourceLineNo">078</span>  }<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>  void setReporter(CancelableProgressable reporter) {<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    this.reporter = reporter;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  }<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   * Start the threads that will pump data from the entryBuffers to the output files.<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  void startWriterThreads() throws IOException {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    for (int i = 0; i &lt; numThreads; i++) {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>      WriterThread t = new WriterThread(controller, entryBuffers, this, i);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>      t.start();<a name="line.90"></a>
+<span class="sourceLineNo">091</span>      writerThreads.add(t);<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    }<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  }<a name="line.93"></a>
+<span class="sourceLineNo">094</span><a name="line.94"></a>
+<span class="sourceLineNo">095</span>  /**<a name="line.95"></a>
+<span class="sourceLineNo">096</span>   * Wait for writer threads to dump all info to the sink<a name="line.96"></a>
+<span class="sourceLineNo">097</span>   *<a name="line.97"></a>
+<span class="sourceLineNo">098</span>   * @return true when there is no error<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   */<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  boolean finishWriterThreads() throws IOException {<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    LOG.debug("Waiting for split writer threads to finish");<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    boolean progressFailed = false;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    for (WriterThread t : writerThreads) {<a name="line.103"></a>
+<span class="sourceLineNo">104</span>      t.finish();<a name="line.104"></a>
+<span class="sourceLineNo">105</span>    }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>    for (WriterThread t : writerThreads) {<a name="line.107"></a>
+<span class="sourceLineNo">108</span>      if (!progressFailed &amp;&amp; reporter != null &amp;&amp; !reporter.progress()) {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>        progressFailed = true;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      }<a name="line.110"></a>
+<span class="sourceLineNo">111</span>      try {<a name="line.111"></a>
+<span class="sourceLineNo">112</span>        t.join();<a name="line.112"></a>
+<span class="sourceLineNo">113</span>      } catch (InterruptedException ie) {<a name="line.113"></a>
+<span class="sourceLineNo">114</span>        IOException iie = new InterruptedIOException();<a name="line.114"></a>
+<span class="sourceLineNo">115</span>        iie.initCause(ie);<a name="line.115"></a>
+<span class="sourceLineNo">116</span>        throw iie;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      }<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    }<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    controller.checkForErrors();<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    LOG.info("{} split writer threads finished", this.writerThreads.size());<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    return (!progressFailed);<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  }<a name="line.122"></a>
+<span class="sourceLineNo">123</span><a name="line.123"></a>
+<span class="sourceLineNo">124</span>  long getTotalSkippedEdits() {<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    return this.totalSkippedEdits.get();<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  /**<a name="line.128"></a>
+<span class="sourceLineNo">129</span>   * @return the number of currently opened writers<a name="line.129"></a>
+<span class="sourceLineNo">130</span>   */<a name="line.130"></a>
+<span class="sourceLineNo">131</span>  abstract int getNumOpenWriters();<a name="line.131"></a>
+<span class="sourceLineNo">132</span><a name="line.132"></a>
+<span class="sourceLineNo">133</span>  /**<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   * @param buffer A buffer of some number of edits for a given region.<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   */<a name="line.135"></a>
+<span class="sourceLineNo">136</span>  abstract void append(EntryBuffers.RegionEntryBuffer buffer) throws IOException;<a name="line.136"></a>
+<span class="sourceLineNo">137</span><a name="line.137"></a>
+<span class="sourceLineNo">138</span>  abstract List&lt;Path&gt; close() throws IOException;<a name="line.138"></a>
+<span class="sourceLineNo">139</span><a name="line.139"></a>
+<span class="sourceLineNo">140</span>  /**<a name="line.140"></a>
+<span class="sourceLineNo">141</span>   * @return a map from encoded region ID to the number of edits written out for that region.<a name="line.141"></a>
+<span class="sourceLineNo">142</span>   */<a name="line.142"></a>
+<span class="sourceLineNo">143</span>  abstract Map&lt;String, Long&gt; getOutputCounts();<a name="line.143"></a>
+<span class="sourceLineNo">144</span><a name="line.144"></a>
+<span class="sourceLineNo">145</span>  /**<a name="line.145"></a>
+<span class="sourceLineNo">146</span>   * @return number of regions we've recovered<a name="line.146"></a>
+<span class="sourceLineNo">147</span>   */<a name="line.147"></a>
+<span class="sourceLineNo">148</span>  abstract int getNumberOfRecoveredRegions();<a name="line.148"></a>
+<span class="sourceLineNo">149</span><a name="line.149"></a>
+<span class="sourceLineNo">150</span>  /**<a name="line.150"></a>
+<span class="sourceLineNo">151</span>   * Some WALEdit's contain only KV's for account on what happened to a region. Not all sinks will<a name="line.151"></a>
+<span class="sourceLineNo">152</span>   * want to get all of those edits.<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   * @return Return true if this sink wants to accept this region-level WALEdit.<a name="line.153"></a>
+<span class="sourceLineNo">154</span>   */<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  abstract boolean keepRegionEvent(WAL.Entry entry);<a name="line.155"></a>
+<span class="sourceLineNo">156</span><a name="line.156"></a>
+<span class="sourceLineNo">157</span>  public static class WriterThread extends Thread {<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    private volatile boolean shouldStop = false;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    private WALSplitter.PipelineController controller;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>    private EntryBuffers entryBuffers;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>    private OutputSink outputSink = null;<a name="line.161"></a>
+<span class="sourceLineNo">162</span><a name="line.162"></a>
+<span class="sourceLineNo">163</span>    WriterThread(WALSplitter.PipelineController controller, EntryBuffers entryBuffers,<a name="line.163"></a>
+<span class="sourceLineNo">164</span>        OutputSink sink, int i) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      super(Thread.currentThread().getName() + "-Writer-" + i);<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      this.controller = controller;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      this.entryBuffers = entryBuffers;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>      outputSink = sink;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    }<a name="line.169"></a>
+<span class="sourceLineNo">170</span><a name="line.170"></a>
+<span class="sourceLineNo">171</span>    @Override<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    public void run()  {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>      try {<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        doRun();<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      } catch (Throwable t) {<a name="line.175"></a>
+<span class="sourceLineNo">176</span>        LOG.error("Exiting thread", t);<a name="line.176"></a>
+<span class="sourceLineNo">177</span>        controller.writerThreadError(t);<a name="line.177"></a>
+<span class="sourceLineNo">178</span>      }<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    }<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>    private void doRun() throws IOException {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>      LOG.trace("Writer thread starting");<a name="line.182"></a>
+<span class="sourceLineNo">183</span>      while (true) {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>        EntryBuffers.RegionEntryBuffer buffer = entryBuffers.getChunkToWrite();<a name="line.184"></a>
+<span class="sourceLineNo">185</span>        if (buffer == null) {<a name="line.185"></a>
+<span class="sourceLineNo">186</span>          // No data currently available, wait on some more to show up<a name="line.186"></a>
+<span class="sourceLineNo">187</span>          synchronized (controller.dataAvailable) {<a name="line.187"></a>
+<span class="sourceLineNo">188</span>            if (shouldStop) {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>              return;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>            }<a name="line.190"></a>
+<span class="sourceLineNo">191</span>            try {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>              controller.dataAvailable.wait(500);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>            } catch (InterruptedException ie) {<a name="line.193"></a>
+<span class="sourceLineNo">194</span>              if (!shouldStop) {<a name="line.194"></a>
+<span class="sourceLineNo">195</span>                throw new RuntimeException(ie);<a name="line.195"></a>
+<span class="sourceLineNo">196</span>              }<a name="line.196"></a>
+<span class="sourceLineNo">197</span>            }<a name="line.197"></a>
+<span class="sourceLineNo">198</span>          }<a name="line.198"></a>
+<span class="sourceLineNo">199</span>          continue;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>        }<a name="line.200"></a>
+<span class="sourceLineNo">201</span><a name="line.201"></a>
+<span class="sourceLineNo">202</span>        assert buffer != null;<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        try {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>          writeBuffer(buffer);<a name="line.204"></a>
+<span class="sourceLineNo">205</span>        } finally {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>          entryBuffers.doneWriting(buffer);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>        }<a name="line.207"></a>
+<span class="sourceLineNo">208</span>      }<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    }<a name="line.209"></a>
+<span class="sourceLineNo">210</span><a name="line.210"></a>
+<span class="sourceLineNo">211</span>    private void writeBuffer(EntryBuffers.RegionEntryBuffer buffer) throws IOException {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      outputSink.append(buffer);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>    }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>    private void finish() {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      synchronized (controller.dataAvailable) {<a name="line.216"></a>
+<span class="sourceLineNo">217</span>        shouldStop = true;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        controller.dataAvailable.notifyAll();<a name="line.218"></a>
+<span class="sourceLineNo">219</span>      }<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    }<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  }<a name="line.221"></a>
+<span class="sourceLineNo">222</span>}<a name="line.222"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/OutputSink.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/OutputSink.html
index 3cbebfe..7156dee 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/OutputSink.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/OutputSink.html
@@ -64,167 +64,170 @@
 <span class="sourceLineNo">056</span><a name="line.56"></a>
 <span class="sourceLineNo">057</span>  protected final AtomicLong totalSkippedEdits = new AtomicLong();<a name="line.57"></a>
 <span class="sourceLineNo">058</span><a name="line.58"></a>
-<span class="sourceLineNo">059</span>  protected final List&lt;Path&gt; splits = new ArrayList&lt;&gt;();<a name="line.59"></a>
-<span class="sourceLineNo">060</span><a name="line.60"></a>
-<span class="sourceLineNo">061</span>  /**<a name="line.61"></a>
-<span class="sourceLineNo">062</span>   * Used when close this output sink.<a name="line.62"></a>
-<span class="sourceLineNo">063</span>   */<a name="line.63"></a>
-<span class="sourceLineNo">064</span>  protected final ThreadPoolExecutor closeThreadPool;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>  protected final CompletionService&lt;Void&gt; closeCompletionService;<a name="line.65"></a>
-<span class="sourceLineNo">066</span><a name="line.66"></a>
-<span class="sourceLineNo">067</span>  public OutputSink(WALSplitter.PipelineController controller, EntryBuffers entryBuffers,<a name="line.67"></a>
-<span class="sourceLineNo">068</span>      int numWriters) {<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    this.numThreads = numWriters;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    this.controller = controller;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>    this.entryBuffers = entryBuffers;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    this.closeThreadPool = Threads.getBoundedCachedThreadPool(numThreads, 30L, TimeUnit.SECONDS,<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        Threads.newDaemonThreadFactory("split-log-closeStream-"));<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    this.closeCompletionService = new ExecutorCompletionService&lt;&gt;(closeThreadPool);<a name="line.74"></a>
-<span class="sourceLineNo">075</span>  }<a name="line.75"></a>
-<span class="sourceLineNo">076</span><a name="line.76"></a>
-<span class="sourceLineNo">077</span>  void setReporter(CancelableProgressable reporter) {<a name="line.77"></a>
-<span class="sourceLineNo">078</span>    this.reporter = reporter;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  }<a name="line.79"></a>
-<span class="sourceLineNo">080</span><a name="line.80"></a>
-<span class="sourceLineNo">081</span>  /**<a name="line.81"></a>
-<span class="sourceLineNo">082</span>   * Start the threads that will pump data from the entryBuffers to the output files.<a name="line.82"></a>
-<span class="sourceLineNo">083</span>   */<a name="line.83"></a>
-<span class="sourceLineNo">084</span>  void startWriterThreads() throws IOException {<a name="line.84"></a>
-<span class="sourceLineNo">085</span>    for (int i = 0; i &lt; numThreads; i++) {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>      WriterThread t = new WriterThread(controller, entryBuffers, this, i);<a name="line.86"></a>
-<span class="sourceLineNo">087</span>      t.start();<a name="line.87"></a>
-<span class="sourceLineNo">088</span>      writerThreads.add(t);<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    }<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  }<a name="line.90"></a>
-<span class="sourceLineNo">091</span><a name="line.91"></a>
-<span class="sourceLineNo">092</span>  /**<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * Wait for writer threads to dump all info to the sink<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   *<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   * @return true when there is no error<a name="line.95"></a>
-<span class="sourceLineNo">096</span>   */<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  boolean finishWriterThreads() throws IOException {<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    LOG.debug("Waiting for split writer threads to finish");<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    boolean progressFailed = false;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    for (WriterThread t : writerThreads) {<a name="line.100"></a>
-<span class="sourceLineNo">101</span>      t.finish();<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    }<a name="line.102"></a>
-<span class="sourceLineNo">103</span><a name="line.103"></a>
-<span class="sourceLineNo">104</span>    for (WriterThread t : writerThreads) {<a name="line.104"></a>
-<span class="sourceLineNo">105</span>      if (!progressFailed &amp;&amp; reporter != null &amp;&amp; !reporter.progress()) {<a name="line.105"></a>
-<span class="sourceLineNo">106</span>        progressFailed = true;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>      }<a name="line.107"></a>
-<span class="sourceLineNo">108</span>      try {<a name="line.108"></a>
-<span class="sourceLineNo">109</span>        t.join();<a name="line.109"></a>
-<span class="sourceLineNo">110</span>      } catch (InterruptedException ie) {<a name="line.110"></a>
-<span class="sourceLineNo">111</span>        IOException iie = new InterruptedIOException();<a name="line.111"></a>
-<span class="sourceLineNo">112</span>        iie.initCause(ie);<a name="line.112"></a>
-<span class="sourceLineNo">113</span>        throw iie;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      }<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    }<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    controller.checkForErrors();<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    LOG.info("{} split writer threads finished", this.writerThreads.size());<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    return (!progressFailed);<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  }<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>  long getTotalSkippedEdits() {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    return this.totalSkippedEdits.get();<a name="line.122"></a>
-<span class="sourceLineNo">123</span>  }<a name="line.123"></a>
-<span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>  /**<a name="line.125"></a>
-<span class="sourceLineNo">126</span>   * @return the number of currently opened writers<a name="line.126"></a>
-<span class="sourceLineNo">127</span>   */<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  abstract int getNumOpenWriters();<a name="line.128"></a>
-<span class="sourceLineNo">129</span><a name="line.129"></a>
-<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   * @param buffer A buffer of some number of edits for a given region.<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   */<a name="line.132"></a>
-<span class="sourceLineNo">133</span>  abstract void append(EntryBuffers.RegionEntryBuffer buffer) throws IOException;<a name="line.133"></a>
-<span class="sourceLineNo">134</span><a name="line.134"></a>
-<span class="sourceLineNo">135</span>  abstract List&lt;Path&gt; close() throws IOException;<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>  /**<a name="line.137"></a>
-<span class="sourceLineNo">138</span>   * @return a map from encoded region ID to the number of edits written out for that region.<a name="line.138"></a>
-<span class="sourceLineNo">139</span>   */<a name="line.139"></a>
-<span class="sourceLineNo">140</span>  abstract Map&lt;String, Long&gt; getOutputCounts();<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>  /**<a name="line.142"></a>
-<span class="sourceLineNo">143</span>   * @return number of regions we've recovered<a name="line.143"></a>
-<span class="sourceLineNo">144</span>   */<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  abstract int getNumberOfRecoveredRegions();<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  /**<a name="line.147"></a>
-<span class="sourceLineNo">148</span>   * Some WALEdit's contain only KV's for account on what happened to a region. Not all sinks will<a name="line.148"></a>
-<span class="sourceLineNo">149</span>   * want to get all of those edits.<a name="line.149"></a>
-<span class="sourceLineNo">150</span>   * @return Return true if this sink wants to accept this region-level WALEdit.<a name="line.150"></a>
-<span class="sourceLineNo">151</span>   */<a name="line.151"></a>
-<span class="sourceLineNo">152</span>  abstract boolean keepRegionEvent(WAL.Entry entry);<a name="line.152"></a>
-<span class="sourceLineNo">153</span><a name="line.153"></a>
-<span class="sourceLineNo">154</span>  public static class WriterThread extends Thread {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    private volatile boolean shouldStop = false;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    private WALSplitter.PipelineController controller;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    private EntryBuffers entryBuffers;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    private OutputSink outputSink = null;<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>    WriterThread(WALSplitter.PipelineController controller, EntryBuffers entryBuffers,<a name="line.160"></a>
-<span class="sourceLineNo">161</span>        OutputSink sink, int i) {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      super(Thread.currentThread().getName() + "-Writer-" + i);<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      this.controller = controller;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      this.entryBuffers = entryBuffers;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      outputSink = sink;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
-<span class="sourceLineNo">167</span><a name="line.167"></a>
-<span class="sourceLineNo">168</span>    @Override<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    public void run()  {<a name="line.169"></a>
-<span class="sourceLineNo">170</span>      try {<a name="line.170"></a>
-<span class="sourceLineNo">171</span>        doRun();<a name="line.171"></a>
-<span class="sourceLineNo">172</span>      } catch (Throwable t) {<a name="line.172"></a>
-<span class="sourceLineNo">173</span>        LOG.error("Exiting thread", t);<a name="line.173"></a>
-<span class="sourceLineNo">174</span>        controller.writerThreadError(t);<a name="line.174"></a>
-<span class="sourceLineNo">175</span>      }<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    }<a name="line.176"></a>
-<span class="sourceLineNo">177</span><a name="line.177"></a>
-<span class="sourceLineNo">178</span>    private void doRun() throws IOException {<a name="line.178"></a>
-<span class="sourceLineNo">179</span>      LOG.trace("Writer thread starting");<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      while (true) {<a name="line.180"></a>
-<span class="sourceLineNo">181</span>        EntryBuffers.RegionEntryBuffer buffer = entryBuffers.getChunkToWrite();<a name="line.181"></a>
-<span class="sourceLineNo">182</span>        if (buffer == null) {<a name="line.182"></a>
-<span class="sourceLineNo">183</span>          // No data currently available, wait on some more to show up<a name="line.183"></a>
-<span class="sourceLineNo">184</span>          synchronized (controller.dataAvailable) {<a name="line.184"></a>
-<span class="sourceLineNo">185</span>            if (shouldStop) {<a name="line.185"></a>
-<span class="sourceLineNo">186</span>              return;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>            }<a name="line.187"></a>
-<span class="sourceLineNo">188</span>            try {<a name="line.188"></a>
-<span class="sourceLineNo">189</span>              controller.dataAvailable.wait(500);<a name="line.189"></a>
-<span class="sourceLineNo">190</span>            } catch (InterruptedException ie) {<a name="line.190"></a>
-<span class="sourceLineNo">191</span>              if (!shouldStop) {<a name="line.191"></a>
-<span class="sourceLineNo">192</span>                throw new RuntimeException(ie);<a name="line.192"></a>
-<span class="sourceLineNo">193</span>              }<a name="line.193"></a>
-<span class="sourceLineNo">194</span>            }<a name="line.194"></a>
-<span class="sourceLineNo">195</span>          }<a name="line.195"></a>
-<span class="sourceLineNo">196</span>          continue;<a name="line.196"></a>
-<span class="sourceLineNo">197</span>        }<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>        assert buffer != null;<a name="line.199"></a>
-<span class="sourceLineNo">200</span>        try {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>          writeBuffer(buffer);<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        } finally {<a name="line.202"></a>
-<span class="sourceLineNo">203</span>          entryBuffers.doneWriting(buffer);<a name="line.203"></a>
-<span class="sourceLineNo">204</span>        }<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      }<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    }<a name="line.206"></a>
-<span class="sourceLineNo">207</span><a name="line.207"></a>
-<span class="sourceLineNo">208</span>    private void writeBuffer(EntryBuffers.RegionEntryBuffer buffer) throws IOException {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      outputSink.append(buffer);<a name="line.209"></a>
-<span class="sourceLineNo">210</span>    }<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>    private void finish() {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      synchronized (controller.dataAvailable) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        shouldStop = true;<a name="line.214"></a>
-<span class="sourceLineNo">215</span>        controller.dataAvailable.notifyAll();<a name="line.215"></a>
-<span class="sourceLineNo">216</span>      }<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    }<a name="line.217"></a>
-<span class="sourceLineNo">218</span>  }<a name="line.218"></a>
-<span class="sourceLineNo">219</span>}<a name="line.219"></a>
+<span class="sourceLineNo">059</span>  /**<a name="line.59"></a>
+<span class="sourceLineNo">060</span>   * List of all the files produced by this sink<a name="line.60"></a>
+<span class="sourceLineNo">061</span>   */<a name="line.61"></a>
+<span class="sourceLineNo">062</span>  protected final List&lt;Path&gt; splits = new ArrayList&lt;&gt;();<a name="line.62"></a>
+<span class="sourceLineNo">063</span><a name="line.63"></a>
+<span class="sourceLineNo">064</span>  /**<a name="line.64"></a>
+<span class="sourceLineNo">065</span>   * Used when close this output sink.<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   */<a name="line.66"></a>
+<span class="sourceLineNo">067</span>  protected final ThreadPoolExecutor closeThreadPool;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>  protected final CompletionService&lt;Void&gt; closeCompletionService;<a name="line.68"></a>
+<span class="sourceLineNo">069</span><a name="line.69"></a>
+<span class="sourceLineNo">070</span>  public OutputSink(WALSplitter.PipelineController controller, EntryBuffers entryBuffers,<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      int numWriters) {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>    this.numThreads = numWriters;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    this.controller = controller;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    this.entryBuffers = entryBuffers;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    this.closeThreadPool = Threads.getBoundedCachedThreadPool(numThreads, 30L, TimeUnit.SECONDS,<a name="line.75"></a>
+<span class="sourceLineNo">076</span>        Threads.newDaemonThreadFactory("split-log-closeStream-"));<a name="line.76"></a>
+<span class="sourceLineNo">077</span>    this.closeCompletionService = new ExecutorCompletionService&lt;&gt;(closeThreadPool);<a name="line.77"></a>
+<span class="sourceLineNo">078</span>  }<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>  void setReporter(CancelableProgressable reporter) {<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    this.reporter = reporter;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  }<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   * Start the threads that will pump data from the entryBuffers to the output files.<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  void startWriterThreads() throws IOException {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    for (int i = 0; i &lt; numThreads; i++) {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>      WriterThread t = new WriterThread(controller, entryBuffers, this, i);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>      t.start();<a name="line.90"></a>
+<span class="sourceLineNo">091</span>      writerThreads.add(t);<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    }<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  }<a name="line.93"></a>
+<span class="sourceLineNo">094</span><a name="line.94"></a>
+<span class="sourceLineNo">095</span>  /**<a name="line.95"></a>
+<span class="sourceLineNo">096</span>   * Wait for writer threads to dump all info to the sink<a name="line.96"></a>
+<span class="sourceLineNo">097</span>   *<a name="line.97"></a>
+<span class="sourceLineNo">098</span>   * @return true when there is no error<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   */<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  boolean finishWriterThreads() throws IOException {<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    LOG.debug("Waiting for split writer threads to finish");<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    boolean progressFailed = false;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    for (WriterThread t : writerThreads) {<a name="line.103"></a>
+<span class="sourceLineNo">104</span>      t.finish();<a name="line.104"></a>
+<span class="sourceLineNo">105</span>    }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>    for (WriterThread t : writerThreads) {<a name="line.107"></a>
+<span class="sourceLineNo">108</span>      if (!progressFailed &amp;&amp; reporter != null &amp;&amp; !reporter.progress()) {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>        progressFailed = true;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      }<a name="line.110"></a>
+<span class="sourceLineNo">111</span>      try {<a name="line.111"></a>
+<span class="sourceLineNo">112</span>        t.join();<a name="line.112"></a>
+<span class="sourceLineNo">113</span>      } catch (InterruptedException ie) {<a name="line.113"></a>
+<span class="sourceLineNo">114</span>        IOException iie = new InterruptedIOException();<a name="line.114"></a>
+<span class="sourceLineNo">115</span>        iie.initCause(ie);<a name="line.115"></a>
+<span class="sourceLineNo">116</span>        throw iie;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      }<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    }<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    controller.checkForErrors();<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    LOG.info("{} split writer threads finished", this.writerThreads.size());<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    return (!progressFailed);<a name="line.121"></a>
+<span class="sourceLineNo">122</span>  }<a name="line.122"></a>
+<span class="sourceLineNo">123</span><a name="line.123"></a>
+<span class="sourceLineNo">124</span>  long getTotalSkippedEdits() {<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    return this.totalSkippedEdits.get();<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  /**<a name="line.128"></a>
+<span class="sourceLineNo">129</span>   * @return the number of currently opened writers<a name="line.129"></a>
+<span class="sourceLineNo">130</span>   */<a name="line.130"></a>
+<span class="sourceLineNo">131</span>  abstract int getNumOpenWriters();<a name="line.131"></a>
+<span class="sourceLineNo">132</span><a name="line.132"></a>
+<span class="sourceLineNo">133</span>  /**<a name="line.133"></a>
+<span class="sourceLineNo">134</span>   * @param buffer A buffer of some number of edits for a given region.<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   */<a name="line.135"></a>
+<span class="sourceLineNo">136</span>  abstract void append(EntryBuffers.RegionEntryBuffer buffer) throws IOException;<a name="line.136"></a>
+<span class="sourceLineNo">137</span><a name="line.137"></a>
+<span class="sourceLineNo">138</span>  abstract List&lt;Path&gt; close() throws IOException;<a name="line.138"></a>
+<span class="sourceLineNo">139</span><a name="line.139"></a>
+<span class="sourceLineNo">140</span>  /**<a name="line.140"></a>
+<span class="sourceLineNo">141</span>   * @return a map from encoded region ID to the number of edits written out for that region.<a name="line.141"></a>
+<span class="sourceLineNo">142</span>   */<a name="line.142"></a>
+<span class="sourceLineNo">143</span>  abstract Map&lt;String, Long&gt; getOutputCounts();<a name="line.143"></a>
+<span class="sourceLineNo">144</span><a name="line.144"></a>
+<span class="sourceLineNo">145</span>  /**<a name="line.145"></a>
+<span class="sourceLineNo">146</span>   * @return number of regions we've recovered<a name="line.146"></a>
+<span class="sourceLineNo">147</span>   */<a name="line.147"></a>
+<span class="sourceLineNo">148</span>  abstract int getNumberOfRecoveredRegions();<a name="line.148"></a>
+<span class="sourceLineNo">149</span><a name="line.149"></a>
+<span class="sourceLineNo">150</span>  /**<a name="line.150"></a>
+<span class="sourceLineNo">151</span>   * Some WALEdit's contain only KV's for account on what happened to a region. Not all sinks will<a name="line.151"></a>
+<span class="sourceLineNo">152</span>   * want to get all of those edits.<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   * @return Return true if this sink wants to accept this region-level WALEdit.<a name="line.153"></a>
+<span class="sourceLineNo">154</span>   */<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  abstract boolean keepRegionEvent(WAL.Entry entry);<a name="line.155"></a>
+<span class="sourceLineNo">156</span><a name="line.156"></a>
+<span class="sourceLineNo">157</span>  public static class WriterThread extends Thread {<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    private volatile boolean shouldStop = false;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    private WALSplitter.PipelineController controller;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>    private EntryBuffers entryBuffers;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>    private OutputSink outputSink = null;<a name="line.161"></a>
+<span class="sourceLineNo">162</span><a name="line.162"></a>
+<span class="sourceLineNo">163</span>    WriterThread(WALSplitter.PipelineController controller, EntryBuffers entryBuffers,<a name="line.163"></a>
+<span class="sourceLineNo">164</span>        OutputSink sink, int i) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      super(Thread.currentThread().getName() + "-Writer-" + i);<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      this.controller = controller;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      this.entryBuffers = entryBuffers;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>      outputSink = sink;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    }<a name="line.169"></a>
+<span class="sourceLineNo">170</span><a name="line.170"></a>
+<span class="sourceLineNo">171</span>    @Override<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    public void run()  {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>      try {<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        doRun();<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      } catch (Throwable t) {<a name="line.175"></a>
+<span class="sourceLineNo">176</span>        LOG.error("Exiting thread", t);<a name="line.176"></a>
+<span class="sourceLineNo">177</span>        controller.writerThreadError(t);<a name="line.177"></a>
+<span class="sourceLineNo">178</span>      }<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    }<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>    private void doRun() throws IOException {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>      LOG.trace("Writer thread starting");<a name="line.182"></a>
+<span class="sourceLineNo">183</span>      while (true) {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>        EntryBuffers.RegionEntryBuffer buffer = entryBuffers.getChunkToWrite();<a name="line.184"></a>
+<span class="sourceLineNo">185</span>        if (buffer == null) {<a name="line.185"></a>
+<span class="sourceLineNo">186</span>          // No data currently available, wait on some more to show up<a name="line.186"></a>
+<span class="sourceLineNo">187</span>          synchronized (controller.dataAvailable) {<a name="line.187"></a>
+<span class="sourceLineNo">188</span>            if (shouldStop) {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>              return;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>            }<a name="line.190"></a>
+<span class="sourceLineNo">191</span>            try {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>              controller.dataAvailable.wait(500);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>            } catch (InterruptedException ie) {<a name="line.193"></a>
+<span class="sourceLineNo">194</span>              if (!shouldStop) {<a name="line.194"></a>
+<span class="sourceLineNo">195</span>                throw new RuntimeException(ie);<a name="line.195"></a>
+<span class="sourceLineNo">196</span>              }<a name="line.196"></a>
+<span class="sourceLineNo">197</span>            }<a name="line.197"></a>
+<span class="sourceLineNo">198</span>          }<a name="line.198"></a>
+<span class="sourceLineNo">199</span>          continue;<a name="line.199"></a>
+<span class="sourceLineNo">200</span>        }<a name="line.200"></a>
+<span class="sourceLineNo">201</span><a name="line.201"></a>
+<span class="sourceLineNo">202</span>        assert buffer != null;<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        try {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>          writeBuffer(buffer);<a name="line.204"></a>
+<span class="sourceLineNo">205</span>        } finally {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>          entryBuffers.doneWriting(buffer);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>        }<a name="line.207"></a>
+<span class="sourceLineNo">208</span>      }<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    }<a name="line.209"></a>
+<span class="sourceLineNo">210</span><a name="line.210"></a>
+<span class="sourceLineNo">211</span>    private void writeBuffer(EntryBuffers.RegionEntryBuffer buffer) throws IOException {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      outputSink.append(buffer);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>    }<a name="line.213"></a>
+<span class="sourceLineNo">214</span><a name="line.214"></a>
+<span class="sourceLineNo">215</span>    private void finish() {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      synchronized (controller.dataAvailable) {<a name="line.216"></a>
+<span class="sourceLineNo">217</span>        shouldStop = true;<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        controller.dataAvailable.notifyAll();<a name="line.218"></a>
+<span class="sourceLineNo">219</span>      }<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    }<a name="line.220"></a>
+<span class="sourceLineNo">221</span>  }<a name="line.221"></a>
+<span class="sourceLineNo">222</span>}<a name="line.222"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html
index 71ded63..f846139 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/RecoveredEditsOutputSink.html
@@ -6,7 +6,7 @@
 </head>
 <body>
 <div class="sourceContainer">
-<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a>
+<pre><span class="sourceLineNo">001</span>/*<a name="line.1"></a>
 <span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a>
 <span class="sourceLineNo">003</span> * or more contributor license agreements.  See the NOTICE file<a name="line.3"></a>
 <span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a>
@@ -34,133 +34,137 @@
 <span class="sourceLineNo">026</span>import java.util.concurrent.ConcurrentMap;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.util.concurrent.ExecutionException;<a name="line.27"></a>
 <span class="sourceLineNo">028</span>import java.util.concurrent.Future;<a name="line.28"></a>
-<span class="sourceLineNo">029</span><a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.fs.Path;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.TableName;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.slf4j.Logger;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.slf4j.LoggerFactory;<a name="line.36"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.fs.Path;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.TableName;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.slf4j.Logger;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.slf4j.LoggerFactory;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.36"></a>
 <span class="sourceLineNo">037</span><a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.38"></a>
-<span class="sourceLineNo">039</span><a name="line.39"></a>
-<span class="sourceLineNo">040</span>/**<a name="line.40"></a>
-<span class="sourceLineNo">041</span> * Class that manages the output streams from the log splitting process.<a name="line.41"></a>
-<span class="sourceLineNo">042</span> * Every region only has one recovered edits.<a name="line.42"></a>
-<span class="sourceLineNo">043</span> */<a name="line.43"></a>
-<span class="sourceLineNo">044</span>@InterfaceAudience.Private<a name="line.44"></a>
-<span class="sourceLineNo">045</span>class RecoveredEditsOutputSink extends AbstractRecoveredEditsOutputSink {<a name="line.45"></a>
-<span class="sourceLineNo">046</span>  private static final Logger LOG = LoggerFactory.getLogger(RecoveredEditsOutputSink.class);<a name="line.46"></a>
-<span class="sourceLineNo">047</span>  private ConcurrentMap&lt;String, RecoveredEditsWriter&gt; writers = new ConcurrentHashMap&lt;&gt;();<a name="line.47"></a>
-<span class="sourceLineNo">048</span><a name="line.48"></a>
-<span class="sourceLineNo">049</span>  public RecoveredEditsOutputSink(WALSplitter walSplitter,<a name="line.49"></a>
-<span class="sourceLineNo">050</span>      WALSplitter.PipelineController controller, EntryBuffers entryBuffers, int numWriters) {<a name="line.50"></a>
-<span class="sourceLineNo">051</span>    super(walSplitter, controller, entryBuffers, numWriters);<a name="line.51"></a>
-<span class="sourceLineNo">052</span>  }<a name="line.52"></a>
-<span class="sourceLineNo">053</span><a name="line.53"></a>
-<span class="sourceLineNo">054</span>  @Override<a name="line.54"></a>
-<span class="sourceLineNo">055</span>  public void append(EntryBuffers.RegionEntryBuffer buffer) throws IOException {<a name="line.55"></a>
-<span class="sourceLineNo">056</span>    List&lt;WAL.Entry&gt; entries = buffer.entryBuffer;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>    if (entries.isEmpty()) {<a name="line.57"></a>
-<span class="sourceLineNo">058</span>      LOG.warn("got an empty buffer, skipping");<a name="line.58"></a>
-<span class="sourceLineNo">059</span>      return;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>    }<a name="line.60"></a>
-<span class="sourceLineNo">061</span>    RecoveredEditsWriter writer =<a name="line.61"></a>
-<span class="sourceLineNo">062</span>      getRecoveredEditsWriter(buffer.tableName, buffer.encodedRegionName,<a name="line.62"></a>
-<span class="sourceLineNo">063</span>        entries.get(0).getKey().getSequenceId());<a name="line.63"></a>
-<span class="sourceLineNo">064</span>    if (writer != null) {<a name="line.64"></a>
-<span class="sourceLineNo">065</span>      writer.writeRegionEntries(entries);<a name="line.65"></a>
-<span class="sourceLineNo">066</span>    }<a name="line.66"></a>
-<span class="sourceLineNo">067</span>  }<a name="line.67"></a>
-<span class="sourceLineNo">068</span><a name="line.68"></a>
-<span class="sourceLineNo">069</span>  /**<a name="line.69"></a>
-<span class="sourceLineNo">070</span>   * Get a writer and path for a log starting at the given entry. This function is threadsafe so<a name="line.70"></a>
-<span class="sourceLineNo">071</span>   * long as multiple threads are always acting on different regions.<a name="line.71"></a>
-<span class="sourceLineNo">072</span>   * @return null if this region shouldn't output any logs<a name="line.72"></a>
-<span class="sourceLineNo">073</span>   */<a name="line.73"></a>
-<span class="sourceLineNo">074</span>  private RecoveredEditsWriter getRecoveredEditsWriter(TableName tableName, byte[] region,<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    long seqId) throws IOException {<a name="line.75"></a>
-<span class="sourceLineNo">076</span>    RecoveredEditsWriter ret = writers.get(Bytes.toString(region));<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    if (ret != null) {<a name="line.77"></a>
-<span class="sourceLineNo">078</span>      return ret;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>    }<a name="line.79"></a>
-<span class="sourceLineNo">080</span>    ret = createRecoveredEditsWriter(tableName, region, seqId);<a name="line.80"></a>
-<span class="sourceLineNo">081</span>    if (ret == null) {<a name="line.81"></a>
-<span class="sourceLineNo">082</span>      return null;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    }<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    writers.put(Bytes.toString(region), ret);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>    return ret;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>  }<a name="line.86"></a>
-<span class="sourceLineNo">087</span><a name="line.87"></a>
-<span class="sourceLineNo">088</span>  @Override<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  public List&lt;Path&gt; close() throws IOException {<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    boolean isSuccessful = true;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    try {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>      isSuccessful &amp;= finishWriterThreads();<a name="line.92"></a>
-<span class="sourceLineNo">093</span>    } finally {<a name="line.93"></a>
-<span class="sourceLineNo">094</span>      isSuccessful &amp;= closeWriters();<a name="line.94"></a>
-<span class="sourceLineNo">095</span>    }<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    return isSuccessful ? splits : null;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  }<a name="line.97"></a>
-<span class="sourceLineNo">098</span><a name="line.98"></a>
-<span class="sourceLineNo">099</span>  /**<a name="line.99"></a>
-<span class="sourceLineNo">100</span>   * Close all of the output streams.<a name="line.100"></a>
-<span class="sourceLineNo">101</span>   *<a name="line.101"></a>
-<span class="sourceLineNo">102</span>   * @return true when there is no error.<a name="line.102"></a>
-<span class="sourceLineNo">103</span>   */<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  private boolean closeWriters() throws IOException {<a name="line.104"></a>
-<span class="sourceLineNo">105</span>    List&lt;IOException&gt; thrown = Lists.newArrayList();<a name="line.105"></a>
-<span class="sourceLineNo">106</span>    for (RecoveredEditsWriter writer : writers.values()) {<a name="line.106"></a>
-<span class="sourceLineNo">107</span>      closeCompletionService.submit(() -&gt; {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>        Path dst = closeRecoveredEditsWriter(writer, thrown);<a name="line.108"></a>
-<span class="sourceLineNo">109</span>        splits.add(dst);<a name="line.109"></a>
-<span class="sourceLineNo">110</span>        return null;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>      });<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    }<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    boolean progressFailed = false;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    try {<a name="line.114"></a>
-<span class="sourceLineNo">115</span>      for (int i = 0, n = this.writers.size(); i &lt; n; i++) {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>        Future&lt;Void&gt; future = closeCompletionService.take();<a name="line.116"></a>
-<span class="sourceLineNo">117</span>        future.get();<a name="line.117"></a>
-<span class="sourceLineNo">118</span>        if (!progressFailed &amp;&amp; reporter != null &amp;&amp; !reporter.progress()) {<a name="line.118"></a>
-<span class="sourceLineNo">119</span>          progressFailed = true;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>        }<a name="line.120"></a>
-<span class="sourceLineNo">121</span>      }<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    } catch (InterruptedException e) {<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      IOException iie = new InterruptedIOException();<a name="line.123"></a>
-<span class="sourceLineNo">124</span>      iie.initCause(e);<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      throw iie;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    } catch (ExecutionException e) {<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      throw new IOException(e.getCause());<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    } finally {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>      closeThreadPool.shutdownNow();<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    }<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    if (!thrown.isEmpty()) {<a name="line.131"></a>
-<span class="sourceLineNo">132</span>      throw MultipleIOException.createIOException(thrown);<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    }<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    return !progressFailed;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  }<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>  @Override<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public Map&lt;String, Long&gt; getOutputCounts() {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    TreeMap&lt;String, Long&gt; ret = new TreeMap&lt;&gt;();<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    for (Map.Entry&lt;String, RecoveredEditsWriter&gt; entry : writers.entrySet()) {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      ret.put(entry.getKey(), entry.getValue().editsWritten);<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    }<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    return ret;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  }<a name="line.144"></a>
-<span class="sourceLineNo">145</span><a name="line.145"></a>
-<span class="sourceLineNo">146</span>  @Override<a name="line.146"></a>
-<span class="sourceLineNo">147</span>  public int getNumberOfRecoveredRegions() {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    return writers.size();<a name="line.148"></a>
-<span class="sourceLineNo">149</span>  }<a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>  @Override<a name="line.151"></a>
-<span class="sourceLineNo">152</span>  int getNumOpenWriters() {<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    return writers.size();<a name="line.153"></a>
-<span class="sourceLineNo">154</span>  }<a name="line.154"></a>
-<span class="sourceLineNo">155</span>}<a name="line.155"></a>
+<span class="sourceLineNo">038</span>/**<a name="line.38"></a>
+<span class="sourceLineNo">039</span> * Class that manages the output streams from the log splitting process.<a name="line.39"></a>
+<span class="sourceLineNo">040</span> * Every region only has one recovered edits file PER split WAL (if we split<a name="line.40"></a>
+<span class="sourceLineNo">041</span> * multiple WALs during a log-splitting session, on open, a Region may<a name="line.41"></a>
+<span class="sourceLineNo">042</span> * have multiple recovered.edits files to replay -- one per split WAL).<a name="line.42"></a>
+<span class="sourceLineNo">043</span> * @see BoundedRecoveredEditsOutputSink which is like this class but imposes upper bound on<a name="line.43"></a>
+<span class="sourceLineNo">044</span> *   the number of writers active at one time (makes for better throughput).<a name="line.44"></a>
+<span class="sourceLineNo">045</span> */<a name="line.45"></a>
+<span class="sourceLineNo">046</span>@InterfaceAudience.Private<a name="line.46"></a>
+<span class="sourceLineNo">047</span>class RecoveredEditsOutputSink extends AbstractRecoveredEditsOutputSink {<a name="line.47"></a>
+<span class="sourceLineNo">048</span>  private static final Logger LOG = LoggerFactory.getLogger(RecoveredEditsOutputSink.class);<a name="line.48"></a>
+<span class="sourceLineNo">049</span>  private ConcurrentMap&lt;String, RecoveredEditsWriter&gt; writers = new ConcurrentHashMap&lt;&gt;();<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>  public RecoveredEditsOutputSink(WALSplitter walSplitter,<a name="line.51"></a>
+<span class="sourceLineNo">052</span>      WALSplitter.PipelineController controller, EntryBuffers entryBuffers, int numWriters) {<a name="line.52"></a>
+<span class="sourceLineNo">053</span>    super(walSplitter, controller, entryBuffers, numWriters);<a name="line.53"></a>
+<span class="sourceLineNo">054</span>  }<a name="line.54"></a>
+<span class="sourceLineNo">055</span><a name="line.55"></a>
+<span class="sourceLineNo">056</span>  @Override<a name="line.56"></a>
+<span class="sourceLineNo">057</span>  public void append(EntryBuffers.RegionEntryBuffer buffer) throws IOException {<a name="line.57"></a>
+<span class="sourceLineNo">058</span>    List&lt;WAL.Entry&gt; entries = buffer.entryBuffer;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>    if (entries.isEmpty()) {<a name="line.59"></a>
+<span class="sourceLineNo">060</span>      LOG.warn("got an empty buffer, skipping");<a name="line.60"></a>
+<span class="sourceLineNo">061</span>      return;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>    }<a name="line.62"></a>
+<span class="sourceLineNo">063</span>    RecoveredEditsWriter writer =<a name="line.63"></a>
+<span class="sourceLineNo">064</span>      getRecoveredEditsWriter(buffer.tableName, buffer.encodedRegionName,<a name="line.64"></a>
+<span class="sourceLineNo">065</span>        entries.get(0).getKey().getSequenceId());<a name="line.65"></a>
+<span class="sourceLineNo">066</span>    if (writer != null) {<a name="line.66"></a>
+<span class="sourceLineNo">067</span>      writer.writeRegionEntries(entries);<a name="line.67"></a>
+<span class="sourceLineNo">068</span>    }<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  }<a name="line.69"></a>
+<span class="sourceLineNo">070</span><a name="line.70"></a>
+<span class="sourceLineNo">071</span>  /**<a name="line.71"></a>
+<span class="sourceLineNo">072</span>   * Get a writer and path for a log starting at the given entry. This function is threadsafe so<a name="line.72"></a>
+<span class="sourceLineNo">073</span>   * long as multiple threads are always acting on different regions.<a name="line.73"></a>
+<span class="sourceLineNo">074</span>   * @return null if this region shouldn't output any logs<a name="line.74"></a>
+<span class="sourceLineNo">075</span>   */<a name="line.75"></a>
+<span class="sourceLineNo">076</span>  private RecoveredEditsWriter getRecoveredEditsWriter(TableName tableName, byte[] region,<a name="line.76"></a>
+<span class="sourceLineNo">077</span>    long seqId) throws IOException {<a name="line.77"></a>
+<span class="sourceLineNo">078</span>    RecoveredEditsWriter ret = writers.get(Bytes.toString(region));<a name="line.78"></a>
+<span class="sourceLineNo">079</span>    if (ret != null) {<a name="line.79"></a>
+<span class="sourceLineNo">080</span>      return ret;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    }<a name="line.81"></a>
+<span class="sourceLineNo">082</span>    ret = createRecoveredEditsWriter(tableName, region, seqId);<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    if (ret == null) {<a name="line.83"></a>
+<span class="sourceLineNo">084</span>      return null;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>    }<a name="line.85"></a>
+<span class="sourceLineNo">086</span>    LOG.trace("Created {}", ret.path);<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    writers.put(Bytes.toString(region), ret);<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    return ret;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>  }<a name="line.89"></a>
+<span class="sourceLineNo">090</span><a name="line.90"></a>
+<span class="sourceLineNo">091</span>  @Override<a name="line.91"></a>
+<span class="sourceLineNo">092</span>  public List&lt;Path&gt; close() throws IOException {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    boolean isSuccessful = true;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    try {<a name="line.94"></a>
+<span class="sourceLineNo">095</span>      isSuccessful &amp;= finishWriterThreads();<a name="line.95"></a>
+<span class="sourceLineNo">096</span>    } finally {<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      isSuccessful &amp;= closeWriters();<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    }<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    return isSuccessful ? splits : null;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
+<span class="sourceLineNo">101</span><a name="line.101"></a>
+<span class="sourceLineNo">102</span>  /**<a name="line.102"></a>
+<span class="sourceLineNo">103</span>   * Close all of the output streams.<a name="line.103"></a>
+<span class="sourceLineNo">104</span>   *<a name="line.104"></a>
+<span class="sourceLineNo">105</span>   * @return true when there is no error.<a name="line.105"></a>
+<span class="sourceLineNo">106</span>   */<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private boolean closeWriters() throws IOException {<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    List&lt;IOException&gt; thrown = Lists.newArrayList();<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    for (RecoveredEditsWriter writer : writers.values()) {<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      closeCompletionService.submit(() -&gt; {<a name="line.110"></a>
+<span class="sourceLineNo">111</span>        Path dst = closeRecoveredEditsWriter(writer, thrown);<a name="line.111"></a>
+<span class="sourceLineNo">112</span>        LOG.trace("Closed {}", dst);<a name="line.112"></a>
+<span class="sourceLineNo">113</span>        splits.add(dst);<a name="line.113"></a>
+<span class="sourceLineNo">114</span>        return null;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      });<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    }<a name="line.116"></a>
+<span class="sourceLineNo">117</span>    boolean progressFailed = false;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    try {<a name="line.118"></a>
+<span class="sourceLineNo">119</span>      for (int i = 0, n = this.writers.size(); i &lt; n; i++) {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>        Future&lt;Void&gt; future = closeCompletionService.take();<a name="line.120"></a>
+<span class="sourceLineNo">121</span>        future.get();<a name="line.121"></a>
+<span class="sourceLineNo">122</span>        if (!progressFailed &amp;&amp; reporter != null &amp;&amp; !reporter.progress()) {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>          progressFailed = true;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>        }<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      }<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    } catch (InterruptedException e) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      IOException iie = new InterruptedIOException();<a name="line.127"></a>
+<span class="sourceLineNo">128</span>      iie.initCause(e);<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      throw iie;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    } catch (ExecutionException e) {<a name="line.130"></a>
+<span class="sourceLineNo">131</span>      throw new IOException(e.getCause());<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    } finally {<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      closeThreadPool.shutdownNow();<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    }<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    if (!thrown.isEmpty()) {<a name="line.135"></a>
+<span class="sourceLineNo">136</span>      throw MultipleIOException.createIOException(thrown);<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    }<a name="line.137"></a>
+<span class="sourceLineNo">138</span>    return !progressFailed;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>  }<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>  @Override<a name="line.141"></a>
+<span class="sourceLineNo">142</span>  public Map&lt;String, Long&gt; getOutputCounts() {<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    TreeMap&lt;String, Long&gt; ret = new TreeMap&lt;&gt;();<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    for (Map.Entry&lt;String, RecoveredEditsWriter&gt; entry : writers.entrySet()) {<a name="line.144"></a>
+<span class="sourceLineNo">145</span>      ret.put(entry.getKey(), entry.getValue().editsWritten);<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    }<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    return ret;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>  }<a name="line.148"></a>
+<span class="sourceLineNo">149</span><a name="line.149"></a>
+<span class="sourceLineNo">150</span>  @Override<a name="line.150"></a>
+<span class="sourceLineNo">151</span>  public int getNumberOfRecoveredRegions() {<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    return writers.size();<a name="line.152"></a>
+<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  @Override<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  int getNumOpenWriters() {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    return writers.size();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
+<span class="sourceLineNo">159</span>}<a name="line.159"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
index 8b6211e..e7488c7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
@@ -6,7 +6,7 @@
 </head>
 <body>
 <div class="sourceContainer">
-<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a>
+<pre><span class="sourceLineNo">001</span>/*<a name="line.1"></a>
 <span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a>
 <span class="sourceLineNo">003</span> * or more contributor license agreements.  See the NOTICE file<a name="line.3"></a>
 <span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a>
@@ -25,532 +25,548 @@
 <span class="sourceLineNo">017</span> */<a name="line.17"></a>
 <span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.wal;<a name="line.18"></a>
 <span class="sourceLineNo">019</span><a name="line.19"></a>
-<span class="sourceLineNo">020</span>import static org.apache.hadoop.hbase.wal.BoundedRecoveredHFilesOutputSink.DEFAULT_WAL_SPLIT_TO_HFILE;<a name="line.20"></a>
-<span class="sourceLineNo">021</span>import static org.apache.hadoop.hbase.wal.BoundedRecoveredHFilesOutputSink.WAL_SPLIT_TO_HFILE;<a name="line.21"></a>
-<span class="sourceLineNo">022</span>import static org.apache.hadoop.hbase.wal.WALSplitUtil.finishSplitLogFile;<a name="line.22"></a>
-<span class="sourceLineNo">023</span><a name="line.23"></a>
-<span class="sourceLineNo">024</span>import java.io.EOFException;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import java.io.FileNotFoundException;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import java.io.IOException;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import java.io.InterruptedIOException;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import java.text.ParseException;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.util.ArrayList;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import java.util.Collections;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import java.util.List;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import java.util.Map;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import java.util.TreeMap;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.conf.Configuration;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileStatus;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileSystem;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.Path;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.HConstants;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.TableDescriptors;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.procedure2.util.StringUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.regionserver.RegionServerServices;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.CommonFSUtils;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.util.FSTableDescriptors;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.util.RecoverLeaseFSUtils;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.slf4j.Logger;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.slf4j.LoggerFactory;<a name="line.65"></a>
-<span class="sourceLineNo">066</span><a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.69"></a>
-<span class="sourceLineNo">070</span><a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;<a name="line.72"></a>
-<span class="sourceLineNo">073</span><a name="line.73"></a>
-<span class="sourceLineNo">074</span>/**<a name="line.74"></a>
-<span class="sourceLineNo">075</span> * This class is responsible for splitting up a bunch of regionserver commit log<a name="line.75"></a>
-<span class="sourceLineNo">076</span> * files that are no longer being written to, into new files, one per region, for<a name="line.76"></a>
-<span class="sourceLineNo">077</span> * recovering data on startup. Delete the old log files when finished.<a name="line.77"></a>
-<span class="sourceLineNo">078</span> */<a name="line.78"></a>
-<span class="sourceLineNo">079</span>@InterfaceAudience.Private<a name="line.79"></a>
-<span class="sourceLineNo">080</span>public class WALSplitter {<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  private static final Logger LOG = LoggerFactory.getLogger(WALSplitter.class);<a name="line.81"></a>
-<span class="sourceLineNo">082</span><a name="line.82"></a>
-<span class="sourceLineNo">083</span>  /** By default we retry errors in splitting, rather than skipping. */<a name="line.83"></a>
-<span class="sourceLineNo">084</span>  public static final boolean SPLIT_SKIP_ERRORS_DEFAULT = false;<a name="line.84"></a>
-<span class="sourceLineNo">085</span><a name="line.85"></a>
-<span class="sourceLineNo">086</span>  // Parameters for split process<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  protected final Path walDir;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  protected final FileSystem walFS;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  protected final Configuration conf;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  final Path rootDir;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  final FileSystem rootFS;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  final RegionServerServices rsServices;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  final TableDescriptors tableDescriptors;<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  // Major subcomponents of the split process.<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  // These are separated into inner classes to make testing easier.<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  OutputSink outputSink;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  private EntryBuffers entryBuffers;<a name="line.98"></a>
-<span class="sourceLineNo">099</span><a name="line.99"></a>
-<span class="sourceLineNo">100</span>  private SplitLogWorkerCoordination splitLogWorkerCoordination;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  private final WALFactory walFactory;<a name="line.101"></a>
-<span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private MonitoredTask status;<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  // For checking the latest flushed sequence id<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  protected final LastSequenceId sequenceIdChecker;<a name="line.106"></a>
+<span class="sourceLineNo">020</span>import static org.apache.hadoop.hbase.wal.WALSplitUtil.finishSplitLogFile;<a name="line.20"></a>
+<span class="sourceLineNo">021</span>import java.io.EOFException;<a name="line.21"></a>
+<span class="sourceLineNo">022</span>import java.io.FileNotFoundException;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import java.io.IOException;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import java.io.InterruptedIOException;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import java.text.ParseException;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import java.util.ArrayList;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import java.util.Collections;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import java.util.List;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import java.util.Map;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.TreeMap;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.conf.Configuration;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FileStatus;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.FileSystem;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.Path;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.HConstants;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.TableDescriptors;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.TableName;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.procedure2.util.StringUtils;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.regionserver.RegionServerServices;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.CommonFSUtils;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.FSTableDescriptors;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.RecoverLeaseFSUtils;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.slf4j.Logger;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.slf4j.LoggerFactory;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;<a name="line.67"></a>
+<span class="sourceLineNo">068</span><a name="line.68"></a>
+<span class="sourceLineNo">069</span>/**<a name="line.69"></a>
+<span class="sourceLineNo">070</span> * Split RegionServer WAL files. Splits the WAL into new files,<a name="line.70"></a>
+<span class="sourceLineNo">071</span> * one per region, to be picked up on Region reopen. Deletes the split WAL when finished.<a name="line.71"></a>
+<span class="sourceLineNo">072</span> * See {@link #split(Path, Path, Path, FileSystem, Configuration, WALFactory)} or<a name="line.72"></a>
+<span class="sourceLineNo">073</span> * {@link #splitLogFile(Path, FileStatus, FileSystem, Configuration, CancelableProgressable,<a name="line.73"></a>
+<span class="sourceLineNo">074</span> *   LastSequenceId, SplitLogWorkerCoordination, WALFactory, RegionServerServices)} for<a name="line.74"></a>
+<span class="sourceLineNo">075</span> *   entry points.<a name="line.75"></a>
+<span class="sourceLineNo">076</span> */<a name="line.76"></a>
+<span class="sourceLineNo">077</span>@InterfaceAudience.Private<a name="line.77"></a>
+<span class="sourceLineNo">078</span>public class WALSplitter {<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  private static final Logger LOG = LoggerFactory.getLogger(WALSplitter.class);<a name="line.79"></a>
+<span class="sourceLineNo">080</span><a name="line.80"></a>
+<span class="sourceLineNo">081</span>  /** By default we retry errors in splitting, rather than skipping. */<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  public static final boolean SPLIT_SKIP_ERRORS_DEFAULT = false;<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  // Parameters for split process<a name="line.84"></a>
+<span class="sourceLineNo">085</span>  protected final Path walDir;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>  protected final FileSystem walFS;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  protected final Configuration conf;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  final Path rootDir;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>  final FileSystem rootFS;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>  final RegionServerServices rsServices;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>  final TableDescriptors tableDescriptors;<a name="line.91"></a>
+<span class="sourceLineNo">092</span><a name="line.92"></a>
+<span class="sourceLineNo">093</span>  // Major subcomponents of the split process.<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  // These are separated into inner classes to make testing easier.<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  OutputSink outputSink;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  private EntryBuffers entryBuffers;<a name="line.96"></a>
+<span class="sourceLineNo">097</span><a name="line.97"></a>
+<span class="sourceLineNo">098</span>  /**<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   * Coordinator for split log. Used by the zk-based log splitter.<a name="line.99"></a>
+<span class="sourceLineNo">100</span>   * Not used by the procedure v2-based log splitter.<a name="line.100"></a>
+<span class="sourceLineNo">101</span>   */<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  private SplitLogWorkerCoordination splitLogWorkerCoordination;<a name="line.102"></a>
+<span class="sourceLineNo">103</span><a name="line.103"></a>
+<span class="sourceLineNo">104</span>  private final WALFactory walFactory;<a name="line.104"></a>
+<span class="sourceLineNo">105</span><a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private MonitoredTask status;<a name="line.106"></a>
 <span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  // Map encodedRegionName -&gt; lastFlushedSequenceId<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  protected Map&lt;String, Long&gt; lastFlushedSequenceIds = new ConcurrentHashMap&lt;&gt;();<a name="line.109"></a>
+<span class="sourceLineNo">108</span>  // For checking the latest flushed sequence id<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  protected final LastSequenceId sequenceIdChecker;<a name="line.109"></a>
 <span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>  // Map encodedRegionName -&gt; maxSeqIdInStores<a name="line.111"></a>
-<span class="sourceLineNo">112</span>  protected Map&lt;String, Map&lt;byte[], Long&gt;&gt; regionMaxSeqIdInStores = new ConcurrentHashMap&lt;&gt;();<a name="line.112"></a>
+<span class="sourceLineNo">111</span>  // Map encodedRegionName -&gt; lastFlushedSequenceId<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  protected Map&lt;String, Long&gt; lastFlushedSequenceIds = new ConcurrentHashMap&lt;&gt;();<a name="line.112"></a>
 <span class="sourceLineNo">113</span><a name="line.113"></a>
-<span class="sourceLineNo">114</span>  // the file being split currently<a name="line.114"></a>
-<span class="sourceLineNo">115</span>  private FileStatus fileBeingSplit;<a name="line.115"></a>
+<span class="sourceLineNo">114</span>  // Map encodedRegionName -&gt; maxSeqIdInStores<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  protected Map&lt;String, Map&lt;byte[], Long&gt;&gt; regionMaxSeqIdInStores = new ConcurrentHashMap&lt;&gt;();<a name="line.115"></a>
 <span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>  private final String tmpDirName;<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  public final static String SPLIT_WRITER_CREATION_BOUNDED = "hbase.split.writer.creation.bounded";<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public final static String SPLIT_WAL_BUFFER_SIZE = "hbase.regionserver.hlog.splitlog.buffersize";<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  public final static String SPLIT_WAL_WRITER_THREADS =<a name="line.121"></a>
-<span class="sourceLineNo">122</span>      "hbase.regionserver.hlog.splitlog.writer.threads";<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span>  @VisibleForTesting<a name="line.124"></a>
-<span class="sourceLineNo">125</span>  WALSplitter(final WALFactory factory, Configuration conf, Path walDir, FileSystem walFS,<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      Path rootDir, FileSystem rootFS, LastSequenceId idChecker,<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      SplitLogWorkerCoordination splitLogWorkerCoordination, RegionServerServices rsServices) {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    this.conf = HBaseConfiguration.create(conf);<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    String codecClassName =<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        conf.get(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    this.conf.set(HConstants.RPC_CODEC_CONF_KEY, codecClassName);<a name="line.131"></a>
-<span class="sourceLineNo">132</span>    this.walDir = walDir;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    this.walFS = walFS;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    this.rootDir = rootDir;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    this.rootFS = rootFS;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    this.sequenceIdChecker = idChecker;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    this.splitLogWorkerCoordination = splitLogWorkerCoordination;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>    this.rsServices = rsServices;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    if (rsServices != null) {<a name="line.139"></a>
-<span class="sourceLineNo">140</span>      this.tableDescriptors = rsServices.getTableDescriptors();<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    } else {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      this.tableDescriptors = new FSTableDescriptors(rootFS, rootDir, true, true);<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    }<a name="line.143"></a>
-<span class="sourceLineNo">144</span><a name="line.144"></a>
-<span class="sourceLineNo">145</span>    this.walFactory = factory;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>    PipelineController controller = new PipelineController();<a name="line.146"></a>
-<span class="sourceLineNo">147</span>    this.tmpDirName =<a name="line.147"></a>
-<span class="sourceLineNo">148</span>      conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY, HConstants.DEFAULT_TEMPORARY_HDFS_DIRECTORY);<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>    // if we limit the number of writers opened for sinking recovered edits<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    boolean splitWriterCreationBounded = conf.getBoolean(SPLIT_WRITER_CREATION_BOUNDED, false);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    boolean splitToHFile = conf.getBoolean(WAL_SPLIT_TO_HFILE, DEFAULT_WAL_SPLIT_TO_HFILE);<a name="line.153"></a>
-<span class="sourceLineNo">154</span>    long bufferSize = this.conf.getLong(SPLIT_WAL_BUFFER_SIZE, 128 * 1024 * 1024);<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    int numWriterThreads = this.conf.getInt(SPLIT_WAL_WRITER_THREADS, 3);<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>    if (splitToHFile) {<a name="line.157"></a>
-<span class="sourceLineNo">158</span>      entryBuffers = new BoundedEntryBuffers(controller, bufferSize);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      outputSink =<a name="line.159"></a>
-<span class="sourceLineNo">160</span>          new BoundedRecoveredHFilesOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    } else if (splitWriterCreationBounded) {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      entryBuffers = new BoundedEntryBuffers(controller, bufferSize);<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      outputSink =<a name="line.163"></a>
-<span class="sourceLineNo">164</span>          new BoundedRecoveredEditsOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.164"></a>
-<span class="sourceLineNo">165</span>    } else {<a name="line.165"></a>
-<span class="sourceLineNo">166</span>      entryBuffers = new EntryBuffers(controller, bufferSize);<a name="line.166"></a>
-<span class="sourceLineNo">167</span>      outputSink = new RecoveredEditsOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
-<span class="sourceLineNo">169</span>  }<a name="line.169"></a>
-<span class="sourceLineNo">170</span><a name="line.170"></a>
-<span class="sourceLineNo">171</span>  WALFactory getWalFactory(){<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    return this.walFactory;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>  FileStatus getFileBeingSplit() {<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    return fileBeingSplit;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>  }<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  String getTmpDirName() {<a name="line.179"></a>
-<span class="sourceLineNo">180</span>    return this.tmpDirName;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  }<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>  Map&lt;String, Map&lt;byte[], Long&gt;&gt; getRegionMaxSeqIdInStores() {<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return regionMaxSeqIdInStores;<a name="line.184"></a>
+<span class="sourceLineNo">117</span>  // the file being split currently<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  private FileStatus fileBeingSplit;<a name="line.118"></a>
+<span class="sourceLineNo">119</span><a name="line.119"></a>
+<span class="sourceLineNo">120</span>  private final String tmpDirName;<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  /**<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   * Split WAL directly to hfiles instead of into intermediary 'recovered.edits' files.<a name="line.123"></a>
+<span class="sourceLineNo">124</span>   */<a name="line.124"></a>
+<span class="sourceLineNo">125</span>  public static final String WAL_SPLIT_TO_HFILE = "hbase.wal.split.to.hfile";<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  public static final boolean DEFAULT_WAL_SPLIT_TO_HFILE = false;<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  /**<a name="line.128"></a>
+<span class="sourceLineNo">129</span>   * True if we are to run with a bounded number of writers rather than letting the count grow unbounded.<a name="line.129"></a>
+<span class="sourceLineNo">130</span>   * Default is 'false'. Does not apply if you have set 'hbase.wal.split.to.hfile' as that<a name="line.130"></a>
+<span class="sourceLineNo">131</span>   * is always bounded. Only applies when you are doing recovery to 'recovered.edits'<a name="line.131"></a>
+<span class="sourceLineNo">132</span>   * files (the old default). Bounded writing tends to have higher throughput.<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   */<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  public final static String SPLIT_WRITER_CREATION_BOUNDED = "hbase.split.writer.creation.bounded";<a name="line.134"></a>
+<span class="sourceLineNo">135</span><a name="line.135"></a>
+<span class="sourceLineNo">136</span>  public final static String SPLIT_WAL_BUFFER_SIZE = "hbase.regionserver.hlog.splitlog.buffersize";<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  public final static String SPLIT_WAL_WRITER_THREADS =<a name="line.137"></a>
+<span class="sourceLineNo">138</span>      "hbase.regionserver.hlog.splitlog.writer.threads";<a name="line.138"></a>
+<span class="sourceLineNo">139</span><a name="line.139"></a>
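The keys above, together with hbase.wal.split.to.hfile, control how WAL splitting buffers edits and writes its output. A minimal sketch of tuning them through a plain Hadoop Configuration follows; the key names and defaults are the ones declared in this class, while the concrete values chosen here (and the sketch's class name) are illustrative only.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public final class WalSplitConfSketch {
      public static void main(String[] args) {
        // Start from the standard HBase configuration (hbase-site.xml on the classpath).
        Configuration conf = HBaseConfiguration.create();

        // Split straight to HFiles instead of intermediary recovered.edits files (default false).
        conf.setBoolean("hbase.wal.split.to.hfile", false);

        // Bound the number of recovered.edits writers; ignored when splitting to HFiles,
        // which is always bounded (default false).
        conf.setBoolean("hbase.split.writer.creation.bounded", true);

        // Buffer for accumulated edits (default 128 MB) and number of writer threads (default 3).
        conf.setLong("hbase.regionserver.hlog.splitlog.buffersize", 128L * 1024 * 1024);
        conf.setInt("hbase.regionserver.hlog.splitlog.writer.threads", 3);

        System.out.println("bounded=" + conf.getBoolean("hbase.split.writer.creation.bounded", false));
      }

      private WalSplitConfSketch() {
      }
    }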
+<span class="sourceLineNo">140</span>  @VisibleForTesting<a name="line.140"></a>
+<span class="sourceLineNo">141</span>  WALSplitter(final WALFactory factory, Configuration conf, Path walDir, FileSystem walFS,<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      Path rootDir, FileSystem rootFS, LastSequenceId idChecker,<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      SplitLogWorkerCoordination splitLogWorkerCoordination, RegionServerServices rsServices) {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    this.conf = HBaseConfiguration.create(conf);<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    String codecClassName =<a name="line.145"></a>
+<span class="sourceLineNo">146</span>        conf.get(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    this.conf.set(HConstants.RPC_CODEC_CONF_KEY, codecClassName);<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    this.walDir = walDir;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    this.walFS = walFS;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    this.rootDir = rootDir;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    this.rootFS = rootFS;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    this.sequenceIdChecker = idChecker;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    this.splitLogWorkerCoordination = splitLogWorkerCoordination;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    this.rsServices = rsServices;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    if (rsServices != null) {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      this.tableDescriptors = rsServices.getTableDescriptors();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    } else {<a name="line.157"></a>
+<span class="sourceLineNo">158</span>      this.tableDescriptors = new FSTableDescriptors(rootFS, rootDir, true, true);<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>    this.walFactory = factory;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    PipelineController controller = new PipelineController();<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    this.tmpDirName =<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY, HConstants.DEFAULT_TEMPORARY_HDFS_DIRECTORY);<a name="line.164"></a>
+<span class="sourceLineNo">165</span><a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>    // if we limit the number of writers opened for sinking recovered edits<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    boolean splitWriterCreationBounded = conf.getBoolean(SPLIT_WRITER_CREATION_BOUNDED, false);<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    boolean splitToHFile = conf.getBoolean(WAL_SPLIT_TO_HFILE, DEFAULT_WAL_SPLIT_TO_HFILE);<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    long bufferSize = this.conf.getLong(SPLIT_WAL_BUFFER_SIZE, 128 * 1024 * 1024);<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    int numWriterThreads = this.conf.getInt(SPLIT_WAL_WRITER_THREADS, 3);<a name="line.171"></a>
+<span class="sourceLineNo">172</span><a name="line.172"></a>
+<span class="sourceLineNo">173</span>    if (splitToHFile) {<a name="line.173"></a>
+<span class="sourceLineNo">174</span>      entryBuffers = new BoundedEntryBuffers(controller, bufferSize);<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      outputSink =<a name="line.175"></a>
+<span class="sourceLineNo">176</span>          new BoundedRecoveredHFilesOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.176"></a>
+<span class="sourceLineNo">177</span>    } else if (splitWriterCreationBounded) {<a name="line.177"></a>
+<span class="sourceLineNo">178</span>      entryBuffers = new BoundedEntryBuffers(controller, bufferSize);<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      outputSink =<a name="line.179"></a>
+<span class="sourceLineNo">180</span>          new BoundedRecoveredEditsOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.180"></a>
+<span class="sourceLineNo">181</span>    } else {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>      entryBuffers = new EntryBuffers(controller, bufferSize);<a name="line.182"></a>
+<span class="sourceLineNo">183</span>      outputSink = new RecoveredEditsOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    }<a name="line.184"></a>
 <span class="sourceLineNo">185</span>  }<a name="line.185"></a>
 <span class="sourceLineNo">186</span><a name="line.186"></a>
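For readers tracing the constructor that closes just above: splitting to HFiles takes precedence over bounded recovered.edits writing, which in turn takes precedence over the unbounded default. The small sketch below restates that precedence; the class and method names are invented for illustration, and it returns plain strings rather than constructing the sink classes themselves.

    import org.apache.hadoop.conf.Configuration;

    public final class SinkChoiceSketch {
      /**
       * Mirrors the selection order in the WALSplitter constructor: HFile output wins,
       * then bounded recovered.edits output, then the unbounded recovered.edits default.
       */
      static String chooseSink(Configuration conf) {
        boolean splitToHFile = conf.getBoolean("hbase.wal.split.to.hfile", false);
        boolean bounded = conf.getBoolean("hbase.split.writer.creation.bounded", false);
        if (splitToHFile) {
          return "BoundedRecoveredHFilesOutputSink with BoundedEntryBuffers";
        } else if (bounded) {
          return "BoundedRecoveredEditsOutputSink with BoundedEntryBuffers";
        } else {
          return "RecoveredEditsOutputSink with EntryBuffers";
        }
      }

      private SinkChoiceSketch() {
      }
    }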
-<span class="sourceLineNo">187</span>  /**<a name="line.187"></a>
-<span class="sourceLineNo">188</span>   * Splits a WAL file into region's recovered-edits directory.<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * This is the main entry point for distributed log splitting from SplitLogWorker.<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * &lt;p&gt;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * If the log file has N regions then N recovered.edits files will be produced.<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * &lt;p&gt;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * @return false if it is interrupted by the progress-able.<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  public static boolean splitLogFile(Path walDir, FileStatus logfile, FileSystem walFS,<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      Configuration conf, CancelableProgressable reporter, LastSequenceId idChecker,<a name="line.196"></a>
-<span class="sourceLineNo">197</span>      SplitLogWorkerCoordination splitLogWorkerCoordination, WALFactory factory,<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      RegionServerServices rsServices) throws IOException {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    Path rootDir = CommonFSUtils.getRootDir(conf);<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    FileSystem rootFS = rootDir.getFileSystem(conf);<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    WALSplitter s = new WALSplitter(factory, conf, walDir, walFS, rootDir, rootFS, idChecker,<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        splitLogWorkerCoordination, rsServices);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    return s.splitLogFile(logfile, reporter);<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  }<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  // A wrapper to split one log folder using the method used by distributed<a name="line.206"></a>
-<span class="sourceLineNo">207</span>  // log splitting. Used by tools and unit tests. It should be package private.<a name="line.207"></a>
-<span class="sourceLineNo">208</span>  // It is public only because TestWALObserver is in a different package,<a name="line.208"></a>
-<span class="sourceLineNo">209</span>  // which uses this method to do log splitting.<a name="line.209"></a>
-<span class="sourceLineNo">210</span>  @VisibleForTesting<a name="line.210"></a>
-<span class="sourceLineNo">211</span>  public static List&lt;Path&gt; split(Path walDir, Path logDir, Path oldLogDir, FileSystem walFS,<a name="line.211"></a>
-<span class="sourceLineNo">212</span>      Configuration conf, final WALFactory factory) throws IOException {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    Path rootDir = CommonFSUtils.getRootDir(conf);<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    FileSystem rootFS = rootDir.getFileSystem(conf);<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    final FileStatus[] logfiles =<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        SplitLogManager.getFileList(conf, Collections.singletonList(logDir), null);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    List&lt;Path&gt; splits = new ArrayList&lt;&gt;();<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    if (ArrayUtils.isNotEmpty(logfiles)) {<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      for (FileStatus logfile : logfiles) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>        WALSplitter s =<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            new WALSplitter(factory, conf, walDir, walFS, rootDir, rootFS, null, null, null);<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        if (s.splitLogFile(logfile, null)) {<a name="line.222"></a>
-<span class="sourceLineNo">223</span>          finishSplitLogFile(walDir, oldLogDir, logfile.getPath(), conf);<a name="line.223"></a>
-<span class="sourceLineNo">224</span>          if (s.outputSink.splits != null) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>            splits.addAll(s.outputSink.splits);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>          }<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        }<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    }<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    if (!walFS.delete(logDir, true)) {<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      throw new IOException("Unable to delete src dir: " + logDir);<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    }<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    return splits;<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  }<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /**<a name="line.236"></a>
-<span class="sourceLineNo">237</span>   * log splitting implementation, splits one log file.<a name="line.237"></a>
-<span class="sourceLineNo">238</span>   * @param logfile should be an actual log file.<a name="line.238"></a>
-<span class="sourceLineNo">239</span>   */<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  @VisibleForTesting<a name="line.240"></a>
-<span class="sourceLineNo">241</span>  boolean splitLogFile(FileStatus logfile, CancelableProgressable reporter) throws IOException {<a name="line.241"></a>
-<span class="sourceLineNo">242</span>    Preconditions.checkState(status == null);<a name="line.242"></a>
-<span class="sourceLineNo">243</span>    Preconditions.checkArgument(logfile.isFile(),<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        "passed in file status is for something other than a regular file.");<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    boolean isCorrupted = false;<a name="line.245"></a>
-<span class="sourceLineNo">246</span>    boolean skipErrors = conf.getBoolean("hbase.hlog.split.skip.errors",<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      SPLIT_SKIP_ERRORS_DEFAULT);<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    int interval = conf.getInt("hbase.splitlog.report.interval.loglines", 1024);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    Path logPath = logfile.getPath();<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    boolean outputSinkStarted = false;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    boolean progressFailed = false;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    int editsCount = 0;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    int editsSkipped = 0;<a name="line.253"></a>
-<span class="sourceLineNo">254</span><a name="line.254"></a>
-<span class="sourceLineNo">255</span>    status = TaskMonitor.get().createStatus(<a name="line.255"></a>
-<span class="sourceLineNo">256</span>          "Splitting log file " + logfile.getPath() + "into a temporary staging area.");<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    Reader logFileReader = null;<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    this.fileBeingSplit = logfile;<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    long startTS = EnvironmentEdgeManager.currentTime();<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    try {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      long logLength = logfile.getLen();<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      LOG.info("Splitting WAL={}, size={} ({} bytes)", logPath, StringUtils.humanSize(logLength),<a name="line.262"></a>
-<span class="sourceLineNo">263</span>          logLength);<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      status.setStatus("Opening log file");<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>        progressFailed = true;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        return false;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      }<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      logFileReader = getReader(logfile, skipErrors, reporter);<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      if (logFileReader == null) {<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        LOG.warn("Nothing to split in WAL={}", logPath);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        return true;<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      }<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      long openCost = EnvironmentEdgeManager.currentTime() - startTS;<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      LOG.info("Open WAL={} cost {} ms", logPath, openCost);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      int numOpenedFilesBeforeReporting = conf.getInt("hbase.splitlog.report.openedfiles", 3);<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      int numOpenedFilesLastCheck = 0;<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      outputSink.setReporter(reporter);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      outputSink.startWriterThreads();<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      outputSinkStarted = true;<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      Entry entry;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      Long lastFlushedSequenceId = -1L;<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      startTS = EnvironmentEdgeManager.currentTime();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>      while ((entry = getNextLogLine(logFileReader, logPath, skipErrors)) != null) {<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        byte[] region = entry.getKey().getEncodedRegionName();<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        String encodedRegionNameAsStr = Bytes.toString(region);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>        lastFlushedSequenceId = lastFlushedSequenceIds.get(encodedRegionNameAsStr);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>        if (lastFlushedSequenceId == null) {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>          if (!(isRegionDirPresentUnderRoot(entry.getKey().getTableName(), encodedRegionNameAsStr))) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>            // The region directory itself is not present in the FS. This indicates that<a name="line.290"></a>
-<span class="sourceLineNo">291</span>            // the region/table is already removed. We can just skip all the edits for this<a name="line.291"></a>
-<span class="sourceLineNo">292</span>            // region. Setting lastFlushedSequenceId as Long.MAX_VALUE so that all edits<a name="line.292"></a>
-<span class="sourceLineNo">293</span>            // will get skipped by the seqId check below.<a name="line.293"></a>
-<span class="sourceLineNo">294</span>            // See more details at https://issues.apache.org/jira/browse/HBASE-24189<a name="line.294"></a>
-<span class="sourceLineNo">295</span>            LOG.info("{} no longer available in the FS. Skipping all edits for this region.",<a name="line.295"></a>
-<span class="sourceLineNo">296</span>                encodedRegionNameAsStr);<a name="line.296"></a>
-<span class="sourceLineNo">297</span>            lastFlushedSequenceId = Long.MAX_VALUE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>          } else {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>            if (sequenceIdChecker != null) {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>              RegionStoreSequenceIds ids = sequenceIdChecker.getLastSequenceId(region);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>              Map&lt;byte[], Long&gt; maxSeqIdInStores = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.301"></a>
-<span class="sourceLineNo">302</span>              for (StoreSequenceId storeSeqId : ids.getStoreSequenceIdList()) {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>                maxSeqIdInStores.put(storeSeqId.getFamilyName().toByteArray(),<a name="line.303"></a>
-<span class="sourceLineNo">304</span>                    storeSeqId.getSequenceId());<a name="line.304"></a>
-<span class="sourceLineNo">305</span>              }<a name="line.305"></a>
-<span class="sourceLineNo">306</span>              regionMaxSeqIdInStores.put(encodedRegionNameAsStr, maxSeqIdInStores);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>              lastFlushedSequenceId = ids.getLastFlushedSequenceId();<a name="line.307"></a>
-<span class="sourceLineNo">308</span>              if (LOG.isDebugEnabled()) {<a name="line.308"></a>
-<span class="sourceLineNo">309</span>                LOG.debug("DLS Last flushed sequenceid for " + encodedRegionNameAsStr + ": "<a name="line.309"></a>
-<span class="sourceLineNo">310</span>                    + TextFormat.shortDebugString(ids));<a name="line.310"></a>
-<span class="sourceLineNo">311</span>              }<a name="line.311"></a>
-<span class="sourceLineNo">312</span>            }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>            if (lastFlushedSequenceId == null) {<a name="line.313"></a>
-<span class="sourceLineNo">314</span>              lastFlushedSequenceId = -1L;<a name="line.314"></a>
-<span class="sourceLineNo">315</span>            }<a name="line.315"></a>
-<span class="sourceLineNo">316</span>          }<a name="line.316"></a>
-<span class="sourceLineNo">317</span>          lastFlushedSequenceIds.put(encodedRegionNameAsStr, lastFlushedSequenceId);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>        }<a name="line.318"></a>
-<span class="sourceLineNo">319</span>        if (lastFlushedSequenceId &gt;= entry.getKey().getSequenceId()) {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>          editsSkipped++;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>          continue;<a name="line.321"></a>
-<span class="sourceLineNo">322</span>        }<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        // Don't send Compaction/Close/Open region events to recovered edit type sinks.<a name="line.323"></a>
-<span class="sourceLineNo">324</span>        if (entry.getEdit().isMetaEdit() &amp;&amp; !outputSink.keepRegionEvent(entry)) {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>          editsSkipped++;<a name="line.325"></a>
-<span class="sourceLineNo">326</span>          continue;<a name="line.326"></a>
-<span class="sourceLineNo">327</span>        }<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        entryBuffers.appendEntry(entry);<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        editsCount++;<a name="line.329"></a>
-<span class="sourceLineNo">330</span>        int moreWritersFromLastCheck = this.getNumOpenWriters() - numOpenedFilesLastCheck;<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        // If sufficient edits have passed, check if we should report progress.<a name="line.331"></a>
-<span class="sourceLineNo">332</span>        if (editsCount % interval == 0<a name="line.332"></a>
-<span class="sourceLineNo">333</span>            || moreWritersFromLastCheck &gt; numOpenedFilesBeforeReporting) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          numOpenedFilesLastCheck = this.getNumOpenWriters();<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          String countsStr = (editsCount - (editsSkipped + outputSink.getTotalSkippedEdits()))<a name="line.335"></a>
-<span class="sourceLineNo">336</span>              + " edits, skipped " + editsSkipped + " edits.";<a name="line.336"></a>
-<span class="sourceLineNo">337</span>          status.setStatus("Split " + countsStr);<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.338"></a>
-<span class="sourceLineNo">339</span>            progressFailed = true;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>            return false;<a name="line.340"></a>
-<span class="sourceLineNo">341</span>          }<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        }<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      }<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    } catch (InterruptedException ie) {<a name="line.344"></a>
-<span class="sourceLineNo">345</span>      IOException iie = new InterruptedIOException();<a name="line.345"></a>
-<span class="sourceLineNo">346</span>      iie.initCause(ie);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      throw iie;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    } catch (CorruptedLogFileException e) {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      LOG.warn("Could not parse, corrupted WAL={}", logPath, e);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>      if (splitLogWorkerCoordination != null) {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>        // Some tests pass in a csm of null.<a name="line.351"></a>
-<span class="sourceLineNo">352</span>        splitLogWorkerCoordination.markCorrupted(walDir, logfile.getPath().getName(), walFS);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      } else {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>        // for tests only<a name="line.354"></a>
-<span class="sourceLineNo">355</span>        ZKSplitLog.markCorrupted(walDir, logfile.getPath().getName(), walFS);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      }<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      isCorrupted = true;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    } catch (IOException e) {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      e = e instanceof RemoteException ? ((RemoteException) e).unwrapRemoteException() : e;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>      throw e;<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    } finally {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>      LOG.debug("Finishing writing output logs and closing down");<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      try {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>        if (null != logFileReader) {<a name="line.364"></a>
-<span class="sourceLineNo">365</span>          logFileReader.close();<a name="line.365"></a>
-<span class="sourceLineNo">366</span>        }<a name="line.366"></a>
-<span class="sourceLineNo">367</span>      } catch (IOException exception) {<a name="line.367"></a>
-<span class="sourceLineNo">368</span>        LOG.warn("Could not close WAL reader", exception);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
-<span class="sourceLineNo">370</span>      try {<a name="line.370"></a>
-<span class="sourceLineNo">371</span>        if (outputSinkStarted) {<a name="line.371"></a>
-<span class="sourceLineNo">372</span>          // Set progress_failed to true as the immediate following statement will reset its value<a name="line.372"></a>
-<span class="sourceLineNo">373</span>          // when close() throws exception, progress_failed has the right value<a name="line.373"></a>
-<span class="sourceLineNo">374</span>          progressFailed = true;<a name="line.374"></a>
-<span class="sourceLineNo">375</span>          progressFailed = outputSink.close() == null;<a name="line.375"></a>
-<span class="sourceLineNo">376</span>        }<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      } finally {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>        long processCost = EnvironmentEdgeManager.currentTime() - startTS;<a name="line.378"></a>
-<span class="sourceLineNo">379</span>        // See if length got updated post lease recovery<a name="line.379"></a>
-<span class="sourceLineNo">380</span>        String msg = "Processed " + editsCount + " edits across " +<a name="line.380"></a>
-<span class="sourceLineNo">381</span>            outputSink.getNumberOfRecoveredRegions() + " regions cost " + processCost +<a name="line.381"></a>
-<span class="sourceLineNo">382</span>            " ms; edits skipped=" + editsSkipped + "; WAL=" + logPath + ", size=" +<a name="line.382"></a>
-<span class="sourceLineNo">383</span>            StringUtils.humanSize(logfile.getLen()) + ", length=" + logfile.getLen() +<a name="line.383"></a>
-<span class="sourceLineNo">384</span>            ", corrupted=" + isCorrupted + ", progress failed=" + progressFailed;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>        LOG.info(msg);<a name="line.385"></a>
-<span class="sourceLineNo">386</span>        status.markComplete(msg);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      }<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    }<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    return !progressFailed;<a name="line.389"></a>
-<span class="sourceLineNo">390</span>  }<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>  private boolean isRegionDirPresentUnderRoot(TableName tableName, String regionName)<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      throws IOException {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    Path regionDirPath = CommonFSUtils.getRegionDir(this.rootDir, tableName, regionName);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    return this.rootFS.exists(regionDirPath);<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * Create a new {@link Reader} for reading logs to split.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private Reader getReader(FileStatus file, boolean skipErrors, CancelableProgressable reporter)<a name="line.401"></a>
-<span class="sourceLineNo">402</span>      throws IOException, CorruptedLogFileException {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>    Path path = file.getPath();<a name="line.403"></a>
-<span class="sourceLineNo">404</span>    long length = file.getLen();<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    Reader in;<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>    // Check for possibly empty file. With appends, currently Hadoop reports a<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    // zero length even if the file has been sync'd. Revisit if HDFS-376 or<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    // HDFS-878 is committed.<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    if (length &lt;= 0) {<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      LOG.warn("File {} might be still open, length is 0", path);<a name="line.411"></a>
-<span class="sourceLineNo">412</span>    }<a name="line.412"></a>
+<span class="sourceLineNo">187</span>  WALFactory getWalFactory() {<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    return this.walFactory;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>  }<a name="line.189"></a>
+<span class="sourceLineNo">190</span><a name="line.190"></a>
+<span class="sourceLineNo">191</span>  FileStatus getFileBeingSplit() {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>    return fileBeingSplit;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>  }<a name="line.193"></a>
+<span class="sourceLineNo">194</span><a name="line.194"></a>
+<span class="sourceLineNo">195</span>  String getTmpDirName() {<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    return this.tmpDirName;<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  }<a name="line.197"></a>
+<span class="sourceLineNo">198</span><a name="line.198"></a>
+<span class="sourceLineNo">199</span>  Map&lt;String, Map&lt;byte[], Long&gt;&gt; getRegionMaxSeqIdInStores() {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    return regionMaxSeqIdInStores;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>  }<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>  /**<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   * Splits a WAL file.<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * @return false if splitting is interrupted by the CancelableProgressable reporter.<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   */<a name="line.206"></a>
+<span class="sourceLineNo">207</span>  public static boolean splitLogFile(Path walDir, FileStatus logfile, FileSystem walFS,<a name="line.207"></a>
+<span class="sourceLineNo">208</span>      Configuration conf, CancelableProgressable reporter, LastSequenceId idChecker,<a name="line.208"></a>
+<span class="sourceLineNo">209</span>      SplitLogWorkerCoordination splitLogWorkerCoordination, WALFactory factory,<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      RegionServerServices rsServices) throws IOException {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    Path rootDir = CommonFSUtils.getRootDir(conf);<a name="line.211"></a>
+<span class="sourceLineNo">212</span>    FileSystem rootFS = rootDir.getFileSystem(conf);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>    WALSplitter s = new WALSplitter(factory, conf, walDir, walFS, rootDir, rootFS, idChecker,<a name="line.213"></a>
+<span class="sourceLineNo">214</span>        splitLogWorkerCoordination, rsServices);<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    return s.splitLogFile(logfile, reporter);<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  }<a name="line.216"></a>
+<span class="sourceLineNo">217</span><a name="line.217"></a>
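A hedged sketch of driving this static entry point directly, for example from a repair utility: the wrapper class and method names are invented, the WALFactory is assumed to be supplied by the caller, and the null arguments for the reporter, sequence-id checker, coordination and region-server services mirror what split(...) below passes when running outside a live region server.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.wal.WALFactory;
    import org.apache.hadoop.hbase.wal.WALSplitter;

    public final class SplitOneWalFile {
      /**
       * Splits a single WAL file into per-region output (recovered.edits or HFiles,
       * depending on configuration). Returns false only if a progress reporter cancels
       * the split; here no reporter and no coordination are supplied, as in the tool path.
       */
      static boolean splitOneFile(Configuration conf, Path walDir, Path walFile,
          WALFactory factory) throws IOException {
        FileSystem walFS = walDir.getFileSystem(conf);
        FileStatus status = walFS.getFileStatus(walFile);
        return WALSplitter.splitLogFile(walDir, status, walFS, conf,
            null /* reporter */, null /* idChecker */, null /* coordination */,
            factory, null /* rsServices */);
      }

      private SplitOneWalFile() {
      }
    }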
+<span class="sourceLineNo">218</span>  /**<a name="line.218"></a>
+<span class="sourceLineNo">219</span>   * Split a folder of WAL files. Delete the directory when done.<a name="line.219"></a>
+<span class="sourceLineNo">220</span>   * Used by tools and unit tests. It should be package private.<a name="line.220"></a>
+<span class="sourceLineNo">221</span>   * It is public only because TestWALObserver is in a different package,<a name="line.221"></a>
+<span class="sourceLineNo">222</span>   * which uses this method to do log splitting.<a name="line.222"></a>
+<span class="sourceLineNo">223</span>   * @return List of output files created by the split.<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   */<a name="line.224"></a>
+<span class="sourceLineNo">225</span>  @VisibleForTesting<a name="line.225"></a>
+<span class="sourceLineNo">226</span>  public static List&lt;Path&gt; split(Path walDir, Path logDir, Path oldLogDir, FileSystem walFS,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>      Configuration conf, final WALFactory factory) throws IOException {<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    Path rootDir = CommonFSUtils.getRootDir(conf);<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    FileSystem rootFS = rootDir.getFileSystem(conf);<a name="line.229"></a>
+<span class="sourceLineNo">230</span>    final FileStatus[] logfiles =<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        SplitLogManager.getFileList(conf, Collections.singletonList(logDir), null);<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    List&lt;Path&gt; splits = new ArrayList&lt;&gt;();<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    if (ArrayUtils.isNotEmpty(logfiles)) {<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      for (FileStatus logfile : logfiles) {<a name="line.234"></a>
+<span class="sourceLineNo">235</span>        WALSplitter s =<a name="line.235"></a>
+<span class="sourceLineNo">236</span>            new WALSplitter(factory, conf, walDir, walFS, rootDir, rootFS, null, null, null);<a name="line.236"></a>
+<span class="sourceLineNo">237</span>        if (s.splitLogFile(logfile, null)) {<a name="line.237"></a>
+<span class="sourceLineNo">238</span>          finishSplitLogFile(walDir, oldLogDir, logfile.getPath(), conf);<a name="line.238"></a>
+<span class="sourceLineNo">239</span>          if (s.outputSink.splits != null) {<a name="line.239"></a>
+<span class="sourceLineNo">240</span>            splits.addAll(s.outputSink.splits);<a name="line.240"></a>
+<span class="sourceLineNo">241</span>          }<a name="line.241"></a>
+<span class="sourceLineNo">242</span>        }<a name="line.242"></a>
+<span class="sourceLineNo">243</span>      }<a name="line.243"></a>
+<span class="sourceLineNo">244</span>    }<a name="line.244"></a>
+<span class="sourceLineNo">245</span>    if (!walFS.delete(logDir, true)) {<a name="line.245"></a>
+<span class="sourceLineNo">246</span>      throw new IOException("Unable to delete src dir: " + logDir);<a name="line.246"></a>
+<span class="sourceLineNo">247</span>    }<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    return splits;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  }<a name="line.249"></a>
+<span class="sourceLineNo">250</span><a name="line.250"></a>
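And a comparable sketch for the folder-level variant used by tools and unit tests; the directory arguments are taken from the caller so nothing about the cluster layout is assumed, and the WALFactory again comes from elsewhere. Note that split(...) deletes logDir once every file has been handled, so only the returned paths remain afterwards.

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.wal.WALFactory;
    import org.apache.hadoop.hbase.wal.WALSplitter;

    public final class SplitWalFolderSketch {
      /**
       * Splits every WAL file under logDir into per-region output, hands each processed
       * file to finishSplitLogFile (which archives it relative to oldLogDir), removes
       * logDir, and returns the paths of the split output.
       */
      static List<Path> splitFolder(Configuration conf, Path walDir, Path logDir,
          Path oldLogDir, WALFactory factory) throws IOException {
        FileSystem walFS = walDir.getFileSystem(conf);
        return WALSplitter.split(walDir, logDir, oldLogDir, walFS, conf, factory);
      }

      private SplitWalFolderSketch() {
      }
    }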
+<span class="sourceLineNo">251</span>  /**<a name="line.251"></a>
+<span class="sourceLineNo">252</span>   * WAL splitting implementation, splits one log file.<a name="line.252"></a>
+<span class="sourceLineNo">253</span>   * @param logfile should be an actual log file.<a name="line.253"></a>
+<span class="sourceLineNo">254</span>   */<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  @VisibleForTesting<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  boolean splitLogFile(FileStatus logfile, CancelableProgressable reporter) throws IOException {<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    Preconditions.checkState(status == null);<a name="line.257"></a>
+<span class="sourceLineNo">258</span>    Preconditions.checkArgument(logfile.isFile(),<a name="line.258"></a>
+<span class="sourceLineNo">259</span>        "passed in file status is for something other than a regular file.");<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    boolean isCorrupted = false;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    boolean skipErrors = conf.getBoolean("hbase.hlog.split.skip.errors",<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      SPLIT_SKIP_ERRORS_DEFAULT);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    int interval = conf.getInt("hbase.splitlog.report.interval.loglines", 1024);<a name="line.263"></a>
+<span class="sourceLineNo">264</span>    Path logPath = logfile.getPath();<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    boolean outputSinkStarted = false;<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    boolean progressFailed = false;<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    int editsCount = 0;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    int editsSkipped = 0;<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>    status = TaskMonitor.get().createStatus(<a name="line.270"></a>
+<span class="sourceLineNo">271</span>          "Splitting log file " + logfile.getPath() + " into a temporary staging area.");<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    Reader logFileReader = null;<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    this.fileBeingSplit = logfile;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    long startTS = EnvironmentEdgeManager.currentTime();<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    try {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      long logLength = logfile.getLen();<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      LOG.info("Splitting WAL={}, size={} ({} bytes)", logPath, StringUtils.humanSize(logLength),<a name="line.277"></a>
+<span class="sourceLineNo">278</span>          logLength);<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      status.setStatus("Opening log file");<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>        progressFailed = true;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>        return false;<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      }<a name="line.283"></a>
+<span class="sourceLineNo">284</span>      logFileReader = getReader(logfile, skipErrors, reporter);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      if (logFileReader == null) {<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        LOG.warn("Nothing to split in WAL={}", logPath);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>        return true;<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      }<a name="line.288"></a>
+<span class="sourceLineNo">289</span>      long openCost = EnvironmentEdgeManager.currentTime() - startTS;<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      LOG.info("Open WAL={} cost {} ms", logPath, openCost);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      int numOpenedFilesBeforeReporting = conf.getInt("hbase.splitlog.report.openedfiles", 3);<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      int numOpenedFilesLastCheck = 0;<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      outputSink.setReporter(reporter);<a name="line.293"></a>
+<span class="sourceLineNo">294</span>      outputSink.startWriterThreads();<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      outputSinkStarted = true;<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      Entry entry;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      Long lastFlushedSequenceId = -1L;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>      startTS = EnvironmentEdgeManager.currentTime();<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      while ((entry = getNextLogLine(logFileReader, logPath, skipErrors)) != null) {<a name="line.299"></a>
+<span class="sourceLineNo">300</span>        byte[] region = entry.getKey().getEncodedRegionName();<a name="line.300"></a>
+<span class="sourceLineNo">301</span>        String encodedRegionNameAsStr = Bytes.toString(region);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>        lastFlushedSequenceId = lastFlushedSequenceIds.get(encodedRegionNameAsStr);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        if (lastFlushedSequenceId == null) {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>          if (!(isRegionDirPresentUnderRoot(entry.getKey().getTableName(),<a name="line.304"></a>
+<span class="sourceLineNo">305</span>              encodedRegionNameAsStr))) {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>            // The region directory itself is not present in the FS. This indicates that<a name="line.306"></a>
+<span class="sourceLineNo">307</span>            // the region/table is already removed. We can just skip all the edits for this<a name="line.307"></a>
+<span class="sourceLineNo">308</span>            // region. Setting lastFlushedSequenceId as Long.MAX_VALUE so that all edits<a name="line.308"></a>
+<span class="sourceLineNo">309</span>            // will get skipped by the seqId check below.<a name="line.309"></a>
+<span class="sourceLineNo">310</span>            // See more details at https://issues.apache.org/jira/browse/HBASE-24189<a name="line.310"></a>
+<span class="sourceLineNo">311</span>            LOG.info("{} no longer available in the FS. Skipping all edits for this region.",<a name="line.311"></a>
+<span class="sourceLineNo">312</span>                encodedRegionNameAsStr);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>            lastFlushedSequenceId = Long.MAX_VALUE;<a name="line.313"></a>
+<span class="sourceLineNo">314</span>          } else {<a name="line.314"></a>
+<span class="sourceLineNo">315</span>            if (sequenceIdChecker != null) {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>              RegionStoreSequenceIds ids = sequenceIdChecker.getLastSequenceId(region);<a name="line.316"></a>
+<span class="sourceLineNo">317</span>              Map&lt;byte[], Long&gt; maxSeqIdInStores = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.317"></a>
+<span class="sourceLineNo">318</span>              for (StoreSequenceId storeSeqId : ids.getStoreSequenceIdList()) {<a name="line.318"></a>
+<span class="sourceLineNo">319</span>                maxSeqIdInStores.put(storeSeqId.getFamilyName().toByteArray(),<a name="line.319"></a>
+<span class="sourceLineNo">320</span>                    storeSeqId.getSequenceId());<a name="line.320"></a>
+<span class="sourceLineNo">321</span>              }<a name="line.321"></a>
+<span class="sourceLineNo">322</span>              regionMaxSeqIdInStores.put(encodedRegionNameAsStr, maxSeqIdInStores);<a name="line.322"></a>
+<span class="sourceLineNo">323</span>              lastFlushedSequenceId = ids.getLastFlushedSequenceId();<a name="line.323"></a>
+<span class="sourceLineNo">324</span>              if (LOG.isDebugEnabled()) {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>                LOG.debug("DLS Last flushed sequenceid for " + encodedRegionNameAsStr + ": "<a name="line.325"></a>
+<span class="sourceLineNo">326</span>                    + TextFormat.shortDebugString(ids));<a name="line.326"></a>
+<span class="sourceLineNo">327</span>              }<a name="line.327"></a>
+<span class="sourceLineNo">328</span>            }<a name="line.328"></a>
+<span class="sourceLineNo">329</span>            if (lastFlushedSequenceId == null) {<a name="line.329"></a>
+<span class="sourceLineNo">330</span>              lastFlushedSequenceId = -1L;<a name="line.330"></a>
+<span class="sourceLineNo">331</span>            }<a name="line.331"></a>
+<span class="sourceLineNo">332</span>          }<a name="line.332"></a>
+<span class="sourceLineNo">333</span>          lastFlushedSequenceIds.put(encodedRegionNameAsStr, lastFlushedSequenceId);<a name="line.333"></a>
+<span class="sourceLineNo">334</span>        }<a name="line.334"></a>
+<span class="sourceLineNo">335</span>        if (lastFlushedSequenceId &gt;= entry.getKey().getSequenceId()) {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>          editsSkipped++;<a name="line.336"></a>
+<span class="sourceLineNo">337</span>          continue;<a name="line.337"></a>
+<span class="sourceLineNo">338</span>        }<a name="line.338"></a>
+<span class="sourceLineNo">339</span>        // Don't send Compaction/Close/Open region events to recovered edit type sinks.<a name="line.339"></a>
+<span class="sourceLineNo">340</span>        if (entry.getEdit().isMetaEdit() &amp;&amp; !outputSink.keepRegionEvent(entry)) {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>          editsSkipped++;<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          continue;<a name="line.342"></a>
+<span class="sourceLineNo">343</span>        }<a name="line.343"></a>
+<span class="sourceLineNo">344</span>        entryBuffers.appendEntry(entry);<a name="line.344"></a>
+<span class="sourceLineNo">345</span>        editsCount++;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>        int moreWritersFromLastCheck = this.getNumOpenWriters() - numOpenedFilesLastCheck;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        // If sufficient edits have passed, check if we should report progress.<a name="line.347"></a>
+<span class="sourceLineNo">348</span>        if (editsCount % interval == 0<a name="line.348"></a>
+<span class="sourceLineNo">349</span>            || moreWritersFromLastCheck &gt; numOpenedFilesBeforeReporting) {<a name="line.349"></a>
+<span class="sourceLineNo">350</span>          numOpenedFilesLastCheck = this.getNumOpenWriters();<a name="line.350"></a>
+<span class="sourceLineNo">351</span>          String countsStr = (editsCount - (editsSkipped + outputSink.getTotalSkippedEdits()))<a name="line.351"></a>
+<span class="sourceLineNo">352</span>              + " edits, skipped " + editsSkipped + " edits.";<a name="line.352"></a>
+<span class="sourceLineNo">353</span>          status.setStatus("Split " + countsStr);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>          if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.354"></a>
+<span class="sourceLineNo">355</span>            progressFailed = true;<a name="line.355"></a>
+<span class="sourceLineNo">356</span>            return false;<a name="line.356"></a>
+<span class="sourceLineNo">357</span>          }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>        }<a name="line.358"></a>
+<span class="sourceLineNo">359</span>      }<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    } catch (InterruptedException ie) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      IOException iie = new InterruptedIOException();<a name="line.361"></a>
+<span class="sourceLineNo">362</span>      iie.initCause(ie);<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      throw iie;<a name="line.363"></a>
+<span class="sourceLineNo">364</span>    } catch (CorruptedLogFileException e) {<a name="line.364"></a>
+<span class="sourceLineNo">365</span>      LOG.warn("Could not parse, corrupted WAL={}", logPath, e);<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      if (splitLogWorkerCoordination != null) {<a name="line.366"></a>
+<span class="sourceLineNo">367</span>        // Some tests pass in a csm of null.<a name="line.367"></a>
+<span class="sourceLineNo">368</span>        splitLogWorkerCoordination.markCorrupted(walDir, logfile.getPath().getName(), walFS);<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      } else {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>        // for tests only<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        ZKSplitLog.markCorrupted(walDir, logfile.getPath().getName(), walFS);<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      }<a name="line.372"></a>
+<span class="sourceLineNo">373</span>      isCorrupted = true;<a name="line.373"></a>
+<span class="sourceLineNo">374</span>    } catch (IOException e) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      e = e instanceof RemoteException ? ((RemoteException) e).unwrapRemoteException() : e;<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      throw e;<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    } finally {<a name="line.377"></a>
+<span class="sourceLineNo">378</span>      LOG.debug("Finishing writing output logs and closing down");<a name="line.378"></a>
+<span class="sourceLineNo">379</span>      try {<a name="line.379"></a>
+<span class="sourceLineNo">380</span>        if (null != logFileReader) {<a name="line.380"></a>
+<span class="sourceLineNo">381</span>          logFileReader.close();<a name="line.381"></a>
+<span class="sourceLineNo">382</span>        }<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      } catch (IOException exception) {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>        LOG.warn("Could not close WAL reader", exception);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      try {<a name="line.386"></a>
+<span class="sourceLineNo">387</span>        if (outputSinkStarted) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>          // Set progress_failed to true as the immediate following statement will reset its value<a name="line.388"></a>
+<span class="sourceLineNo">389</span>          // when close() throws exception, progress_failed has the right value<a name="line.389"></a>
+<span class="sourceLineNo">390</span>          progressFailed = true;<a name="line.390"></a>
+<span class="sourceLineNo">391</span>          progressFailed = outputSink.close() == null;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>        }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      } finally {<a name="line.393"></a>
+<span class="sourceLineNo">394</span>        long processCost = EnvironmentEdgeManager.currentTime() - startTS;<a name="line.394"></a>
+<span class="sourceLineNo">395</span>        // See if length got updated post lease recovery<a name="line.395"></a>
+<span class="sourceLineNo">396</span>        String msg = "Processed " + editsCount + " edits across " +<a name="line.396"></a>
+<span class="sourceLineNo">397</span>            outputSink.getNumberOfRecoveredRegions() + " regions cost " + processCost +<a name="line.397"></a>
+<span class="sourceLineNo">398</span>            " ms; edits skipped=" + editsSkipped + "; WAL=" + logPath + ", size=" +<a name="line.398"></a>
+<span class="sourceLineNo">399</span>            StringUtils.humanSize(logfile.getLen()) + ", length=" + logfile.getLen() +<a name="line.399"></a>
+<span class="sourceLineNo">400</span>            ", corrupted=" + isCorrupted + ", progress failed=" + progressFailed;<a name="line.400"></a>
+<span class="sourceLineNo">401</span>        LOG.info(msg);<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        status.markComplete(msg);<a name="line.402"></a>
+<span class="sourceLineNo">403</span>      }<a name="line.403"></a>
+<span class="sourceLineNo">404</span>    }<a name="line.404"></a>
+<span class="sourceLineNo">405</span>    return !progressFailed;<a name="line.405"></a>
+<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
+<span class="sourceLineNo">407</span><a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private boolean isRegionDirPresentUnderRoot(TableName tableName, String regionName)<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      throws IOException {<a name="line.409"></a>
+<span class="sourceLineNo">410</span>    Path regionDirPath = CommonFSUtils.getRegionDir(this.rootDir, tableName, regionName);<a name="line.410"></a>
+<span class="sourceLineNo">411</span>    return this.rootFS.exists(regionDirPath);<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
 <span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>    try {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>      RecoverLeaseFSUtils.recoverFileLease(walFS, path, conf, reporter);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>      try {<a name="line.416"></a>
-<span class="sourceLineNo">417</span>        in = getReader(path, reporter);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      } catch (EOFException e) {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>        if (length &lt;= 0) {<a name="line.419"></a>
-<span class="sourceLineNo">420</span>          // TODO should we ignore an empty, not-last log file if skip.errors<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          // is false? Either way, the caller should decide what to do. E.g.<a name="line.421"></a>
-<span class="sourceLineNo">422</span>          // ignore if this is the last log in sequence.<a name="line.422"></a>
-<span class="sourceLineNo">423</span>          // TODO is this scenario still possible if the log has been<a name="line.423"></a>
-<span class="sourceLineNo">424</span>          // recovered (i.e. closed)<a name="line.424"></a>
-<span class="sourceLineNo">425</span>          LOG.warn("Could not open {} for reading. File is empty", path, e);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        }<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        // EOFException being ignored<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        return null;<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      }<a name="line.429"></a>
-<span class="sourceLineNo">430</span>    } catch (IOException e) {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>      if (e instanceof FileNotFoundException) {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        // A wal file may not exist anymore. Nothing can be recovered so move on<a name="line.432"></a>
-<span class="sourceLineNo">433</span>        LOG.warn("File {} does not exist anymore", path, e);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>        return null;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      }<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      if (!skipErrors || e instanceof InterruptedIOException) {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>        throw e; // Don't mark the file corrupted if interrupted, or not skipErrors<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      }<a name="line.438"></a>
-<span class="sourceLineNo">439</span>      throw new CorruptedLogFileException("skipErrors=true Could not open wal "<a name="line.439"></a>
-<span class="sourceLineNo">440</span>        + path + " ignoring", e);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
-<span class="sourceLineNo">442</span>    return in;<a name="line.442"></a>
-<span class="sourceLineNo">443</span>  }<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>  private Entry getNextLogLine(Reader in, Path path, boolean skipErrors)<a name="line.445"></a>
-<span class="sourceLineNo">446</span>      throws CorruptedLogFileException, IOException {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>    try {<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      return in.next();<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    } catch (EOFException eof) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      // truncated files are expected if a RS crashes (see HBASE-2643)<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      LOG.info("EOF from wal {}. Continuing.", path);<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      return null;<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    } catch (IOException e) {<a name="line.453"></a>
-<span class="sourceLineNo">454</span>      // If the IOE resulted from bad file format,<a name="line.454"></a>
-<span class="sourceLineNo">455</span>      // then this problem is idempotent and retrying won't help<a name="line.455"></a>
-<span class="sourceLineNo">456</span>      if (e.getCause() != null &amp;&amp; (e.getCause() instanceof ParseException<a name="line.456"></a>
-<span class="sourceLineNo">457</span>          || e.getCause() instanceof org.apache.hadoop.fs.ChecksumException)) {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        LOG.warn("Parse exception from wal {}. Continuing", path, e);<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        return null;<a name="line.459"></a>
-<span class="sourceLineNo">460</span>      }<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      if (!skipErrors) {<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        throw e;<a name="line.462"></a>
-<span class="sourceLineNo">463</span>      }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      throw new CorruptedLogFileException("skipErrors=true Ignoring exception"<a name="line.464"></a>
-<span class="sourceLineNo">465</span>        + " while parsing wal " + path + ". Marking as corrupted", e);<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    }<a name="line.466"></a>
-<span class="sourceLineNo">467</span>  }<a name="line.467"></a>
-<span class="sourceLineNo">468</span><a name="line.468"></a>
-<span class="sourceLineNo">469</span>  /**<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   * Create a new {@link WALProvider.Writer} for writing log splits.<a name="line.470"></a>
-<span class="sourceLineNo">471</span>   * @return a new Writer instance, caller should close<a name="line.471"></a>
-<span class="sourceLineNo">472</span>   */<a name="line.472"></a>
-<span class="sourceLineNo">473</span>  protected WALProvider.Writer createWriter(Path logfile) throws IOException {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    return walFactory.createRecoveredEditsWriter(walFS, logfile);<a name="line.474"></a>
-<span class="sourceLineNo">475</span>  }<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>  /**<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * Create a new {@link Reader} for reading logs to split.<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @return new Reader instance, caller should close<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   */<a name="line.480"></a>
-<span class="sourceLineNo">481</span>  protected Reader getReader(Path curLogFile, CancelableProgressable reporter) throws IOException {<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    return walFactory.createReader(walFS, curLogFile, reporter);<a name="line.482"></a>
+<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   * Create a new {@link Reader} for reading logs to split.<a name="line.415"></a>
+<span class="sourceLineNo">416</span>   */<a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private Reader getReader(FileStatus file, boolean skipErrors, CancelableProgressable reporter)<a name="line.417"></a>
+<span class="sourceLineNo">418</span>      throws IOException, CorruptedLogFileException {<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    Path path = file.getPath();<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    long length = file.getLen();<a name="line.420"></a>
+<span class="sourceLineNo">421</span>    Reader in;<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>    // Check for possibly empty file. With appends, currently Hadoop reports a<a name="line.423"></a>
+<span class="sourceLineNo">424</span>    // zero length even if the file has been sync'd. Revisit if HDFS-376 or<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    // HDFS-878 is committed.<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    if (length &lt;= 0) {<a name="line.426"></a>
+<span class="sourceLineNo">427</span>      LOG.warn("File {} might be still open, length is 0", path);<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    }<a name="line.428"></a>
+<span class="sourceLineNo">429</span><a name="line.429"></a>
+<span class="sourceLineNo">430</span>    try {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      RecoverLeaseFSUtils.recoverFileLease(walFS, path, conf, reporter);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>      try {<a name="line.432"></a>
+<span class="sourceLineNo">433</span>        in = getReader(path, reporter);<a name="line.433"></a>
+<span class="sourceLineNo">434</span>      } catch (EOFException e) {<a name="line.434"></a>
+<span class="sourceLineNo">435</span>        if (length &lt;= 0) {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>          // TODO should we ignore an empty, not-last log file if skip.errors<a name="line.436"></a>
+<span class="sourceLineNo">437</span>          // is false? Either way, the caller should decide what to do. E.g.<a name="line.437"></a>
+<span class="sourceLineNo">438</span>          // ignore if this is the last log in sequence.<a name="line.438"></a>
+<span class="sourceLineNo">439</span>          // TODO is this scenario still possible if the log has been<a name="line.439"></a>
+<span class="sourceLineNo">440</span>          // recovered (i.e. closed)<a name="line.440"></a>
+<span class="sourceLineNo">441</span>          LOG.warn("Could not open {} for reading. File is empty", path, e);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        }<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        // EOFException being ignored<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        return null;<a name="line.444"></a>
+<span class="sourceLineNo">445</span>      }<a name="line.445"></a>
+<span class="sourceLineNo">446</span>    } catch (IOException e) {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      if (e instanceof FileNotFoundException) {<a name="line.447"></a>
+<span class="sourceLineNo">448</span>        // A wal file may not exist anymore. Nothing can be recovered so move on<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        LOG.warn("File {} does not exist anymore", path, e);<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return null;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      }<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      if (!skipErrors || e instanceof InterruptedIOException) {<a name="line.452"></a>
+<span class="sourceLineNo">453</span>        throw e; // Don't mark the file corrupted if interrupted, or not skipErrors<a name="line.453"></a>
+<span class="sourceLineNo">454</span>      }<a name="line.454"></a>
+<span class="sourceLineNo">455</span>      throw new CorruptedLogFileException("skipErrors=true Could not open wal "<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        + path + " ignoring", e);<a name="line.456"></a>
+<span class="sourceLineNo">457</span>    }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    return in;<a name="line.458"></a>
+<span class="sourceLineNo">459</span>  }<a name="line.459"></a>
+<span class="sourceLineNo">460</span><a name="line.460"></a>
+<span class="sourceLineNo">461</span>  private Entry getNextLogLine(Reader in, Path path, boolean skipErrors)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      throws CorruptedLogFileException, IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>    try {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      return in.next();<a name="line.464"></a>
+<span class="sourceLineNo">465</span>    } catch (EOFException eof) {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>      // truncated files are expected if a RS crashes (see HBASE-2643)<a name="line.466"></a>
+<span class="sourceLineNo">467</span>      LOG.info("EOF from wal {}. Continuing.", path);<a name="line.467"></a>
+<span class="sourceLineNo">468</span>      return null;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>    } catch (IOException e) {<a name="line.469"></a>
+<span class="sourceLineNo">470</span>      // If the IOE resulted from bad file format,<a name="line.470"></a>
+<span class="sourceLineNo">471</span>      // then this problem is idempotent and retrying won't help<a name="line.471"></a>
+<span class="sourceLineNo">472</span>      if (e.getCause() != null &amp;&amp; (e.getCause() instanceof ParseException<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          || e.getCause() instanceof org.apache.hadoop.fs.ChecksumException)) {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>        LOG.warn("Parse exception from wal {}. Continuing", path, e);<a name="line.474"></a>
+<span class="sourceLineNo">475</span>        return null;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>      }<a name="line.476"></a>
+<span class="sourceLineNo">477</span>      if (!skipErrors) {<a name="line.477"></a>
+<span class="sourceLineNo">478</span>        throw e;<a name="line.478"></a>
+<span class="sourceLineNo">479</span>      }<a name="line.479"></a>
+<span class="sourceLineNo">480</span>      throw new CorruptedLogFileException("skipErrors=true Ignoring exception"<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        + " while parsing wal " + path + ". Marking as corrupted", e);<a name="line.481"></a>
+<span class="sourceLineNo">482</span>    }<a name="line.482"></a>
 <span class="sourceLineNo">483</span>  }<a name="line.483"></a>
 <span class="sourceLineNo">484</span><a name="line.484"></a>
 <span class="sourceLineNo">485</span>  /**<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * Get current open writers<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   */<a name="line.487"></a>
-<span class="sourceLineNo">488</span>  private int getNumOpenWriters() {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    int result = 0;<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    if (this.outputSink != null) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      result += this.outputSink.getNumOpenWriters();<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    }<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    return result;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>  }<a name="line.494"></a>
-<span class="sourceLineNo">495</span><a name="line.495"></a>
-<span class="sourceLineNo">496</span>  /**<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * Contains some methods to control WAL-entries producer / consumer interactions<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   */<a name="line.498"></a>
-<span class="sourceLineNo">499</span>  public static class PipelineController {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    // If an exception is thrown by one of the other threads, it will be<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    // stored here.<a name="line.501"></a>
-<span class="sourceLineNo">502</span>    AtomicReference&lt;Throwable&gt; thrown = new AtomicReference&lt;&gt;();<a name="line.502"></a>
-<span class="sourceLineNo">503</span><a name="line.503"></a>
-<span class="sourceLineNo">504</span>    // Wait/notify for when data has been produced by the writer thread,<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    // consumed by the reader thread, or an exception occurred<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    final Object dataAvailable = new Object();<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>    void writerThreadError(Throwable t) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      thrown.compareAndSet(null, t);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>    }<a name="line.510"></a>
+<span class="sourceLineNo">486</span>   * Create a new {@link WALProvider.Writer} for writing log splits.<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * @return a new Writer instance, caller should close<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   */<a name="line.488"></a>
+<span class="sourceLineNo">489</span>  protected WALProvider.Writer createWriter(Path logfile) throws IOException {<a name="line.489"></a>
+<span class="sourceLineNo">490</span>    return walFactory.createRecoveredEditsWriter(walFS, logfile);<a name="line.490"></a>
+<span class="sourceLineNo">491</span>  }<a name="line.491"></a>
+<span class="sourceLineNo">492</span><a name="line.492"></a>
+<span class="sourceLineNo">493</span>  /**<a name="line.493"></a>
+<span class="sourceLineNo">494</span>   * Create a new {@link Reader} for reading logs to split.<a name="line.494"></a>
+<span class="sourceLineNo">495</span>   * @return new Reader instance, caller should close<a name="line.495"></a>
+<span class="sourceLineNo">496</span>   */<a name="line.496"></a>
+<span class="sourceLineNo">497</span>  protected Reader getReader(Path curLogFile, CancelableProgressable reporter) throws IOException {<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    return walFactory.createReader(walFS, curLogFile, reporter);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>  }<a name="line.499"></a>
+<span class="sourceLineNo">500</span><a name="line.500"></a>
+<span class="sourceLineNo">501</span>  /**<a name="line.501"></a>
+<span class="sourceLineNo">502</span>   * Get current open writers<a name="line.502"></a>
+<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
+<span class="sourceLineNo">504</span>  private int getNumOpenWriters() {<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    int result = 0;<a name="line.505"></a>
+<span class="sourceLineNo">506</span>    if (this.outputSink != null) {<a name="line.506"></a>
+<span class="sourceLineNo">507</span>      result += this.outputSink.getNumOpenWriters();<a name="line.507"></a>
+<span class="sourceLineNo">508</span>    }<a name="line.508"></a>
+<span class="sourceLineNo">509</span>    return result;<a name="line.509"></a>
+<span class="sourceLineNo">510</span>  }<a name="line.510"></a>
 <span class="sourceLineNo">511</span><a name="line.511"></a>
-<span class="sourceLineNo">512</span>    /**<a name="line.512"></a>
-<span class="sourceLineNo">513</span>     * Check for errors in the writer threads. If any is found, rethrow it.<a name="line.513"></a>
-<span class="sourceLineNo">514</span>     */<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    void checkForErrors() throws IOException {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      Throwable thrown = this.thrown.get();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      if (thrown == null) {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>        return;<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>      if (thrown instanceof IOException) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>        throw new IOException(thrown);<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      } else {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>        throw new RuntimeException(thrown);<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      }<a name="line.524"></a>
-<span class="sourceLineNo">525</span>    }<a name="line.525"></a>
-<span class="sourceLineNo">526</span>  }<a name="line.526"></a>
+<span class="sourceLineNo">512</span>  /**<a name="line.512"></a>
+<span class="sourceLineNo">513</span>   * Contains some methods to control WAL-entries producer / consumer interactions<a name="line.513"></a>
+<span class="sourceLineNo">514</span>   */<a name="line.514"></a>
+<span class="sourceLineNo">515</span>  public static class PipelineController {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    // If an exception is thrown by one of the other threads, it will be<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    // stored here.<a name="line.517"></a>
+<span class="sourceLineNo">518</span>    AtomicReference&lt;Throwable&gt; thrown = new AtomicReference&lt;&gt;();<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>    // Wait/notify for when data has been produced by the writer thread,<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    // consumed by the reader thread, or an exception occurred<a name="line.521"></a>
+<span class="sourceLineNo">522</span>    final Object dataAvailable = new Object();<a name="line.522"></a>
+<span class="sourceLineNo">523</span><a name="line.523"></a>
+<span class="sourceLineNo">524</span>    void writerThreadError(Throwable t) {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>      thrown.compareAndSet(null, t);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>    }<a name="line.526"></a>
 <span class="sourceLineNo">527</span><a name="line.527"></a>
-<span class="sourceLineNo">528</span>  static class CorruptedLogFileException extends Exception {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    private static final long serialVersionUID = 1L;<a name="line.529"></a>
-<span class="sourceLineNo">530</span><a name="line.530"></a>
-<span class="sourceLineNo">531</span>    CorruptedLogFileException(String s) {<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      super(s);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    }<a name="line.533"></a>
-<span class="sourceLineNo">534</span><a name="line.534"></a>
-<span class="sourceLineNo">535</span>    /**<a name="line.535"></a>
-<span class="sourceLineNo">536</span>     * CorruptedLogFileException with cause<a name="line.536"></a>
-<span class="sourceLineNo">537</span>     *<a name="line.537"></a>
-<span class="sourceLineNo">538</span>     * @param message the message for this exception<a name="line.538"></a>
-<span class="sourceLineNo">539</span>     * @param cause the cause for this exception<a name="line.539"></a>
-<span class="sourceLineNo">540</span>     */<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    CorruptedLogFileException(String message, Throwable cause) {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      super(message, cause);<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>  }<a name="line.544"></a>
-<span class="sourceLineNo">545</span>}<a name="line.545"></a>
+<span class="sourceLineNo">528</span>    /**<a name="line.528"></a>
+<span class="sourceLineNo">529</span>     * Check for errors in the writer threads. If any is found, rethrow it.<a name="line.529"></a>
+<span class="sourceLineNo">530</span>     */<a name="line.530"></a>
+<span class="sourceLineNo">531</span>    void checkForErrors() throws IOException {<a name="line.531"></a>
+<span class="sourceLineNo">532</span>      Throwable thrown = this.thrown.get();<a name="line.532"></a>
+<span class="sourceLineNo">533</span>      if (thrown == null) {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>        return;<a name="line.534"></a>
+<span class="sourceLineNo">535</span>      }<a name="line.535"></a>
+<span class="sourceLineNo">536</span>      if (thrown instanceof IOException) {<a name="line.536"></a>
+<span class="sourceLineNo">537</span>        throw new IOException(thrown);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>      } else {<a name="line.538"></a>
+<span class="sourceLineNo">539</span>        throw new RuntimeException(thrown);<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      }<a name="line.540"></a>
+<span class="sourceLineNo">541</span>    }<a name="line.541"></a>
+<span class="sourceLineNo">542</span>  }<a name="line.542"></a>
+<span class="sourceLineNo">543</span><a name="line.543"></a>
+<span class="sourceLineNo">544</span>  static class CorruptedLogFileException extends Exception {<a name="line.544"></a>
+<span class="sourceLineNo">545</span>    private static final long serialVersionUID = 1L;<a name="line.545"></a>
+<span class="sourceLineNo">546</span><a name="line.546"></a>
+<span class="sourceLineNo">547</span>    CorruptedLogFileException(String s) {<a name="line.547"></a>
+<span class="sourceLineNo">548</span>      super(s);<a name="line.548"></a>
+<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
+<span class="sourceLineNo">550</span><a name="line.550"></a>
+<span class="sourceLineNo">551</span>    /**<a name="line.551"></a>
+<span class="sourceLineNo">552</span>     * CorruptedLogFileException with cause<a name="line.552"></a>
+<span class="sourceLineNo">553</span>     *<a name="line.553"></a>
+<span class="sourceLineNo">554</span>     * @param message the message for this exception<a name="line.554"></a>
+<span class="sourceLineNo">555</span>     * @param cause the cause for this exception<a name="line.555"></a>
+<span class="sourceLineNo">556</span>     */<a name="line.556"></a>
+<span class="sourceLineNo">557</span>    CorruptedLogFileException(String message, Throwable cause) {<a name="line.557"></a>
+<span class="sourceLineNo">558</span>      super(message, cause);<a name="line.558"></a>
+<span class="sourceLineNo">559</span>    }<a name="line.559"></a>
+<span class="sourceLineNo">560</span>  }<a name="line.560"></a>
+<span class="sourceLineNo">561</span>}<a name="line.561"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html
index 8b6211e..e7488c7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html
@@ -6,7 +6,7 @@
 </head>
 <body>
 <div class="sourceContainer">
-<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a>
+<pre><span class="sourceLineNo">001</span>/*<a name="line.1"></a>
 <span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a>
 <span class="sourceLineNo">003</span> * or more contributor license agreements.  See the NOTICE file<a name="line.3"></a>
 <span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a>
@@ -25,532 +25,548 @@
 <span class="sourceLineNo">017</span> */<a name="line.17"></a>
 <span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.wal;<a name="line.18"></a>
 <span class="sourceLineNo">019</span><a name="line.19"></a>
-<span class="sourceLineNo">020</span>import static org.apache.hadoop.hbase.wal.BoundedRecoveredHFilesOutputSink.DEFAULT_WAL_SPLIT_TO_HFILE;<a name="line.20"></a>
-<span class="sourceLineNo">021</span>import static org.apache.hadoop.hbase.wal.BoundedRecoveredHFilesOutputSink.WAL_SPLIT_TO_HFILE;<a name="line.21"></a>
-<span class="sourceLineNo">022</span>import static org.apache.hadoop.hbase.wal.WALSplitUtil.finishSplitLogFile;<a name="line.22"></a>
-<span class="sourceLineNo">023</span><a name="line.23"></a>
-<span class="sourceLineNo">024</span>import java.io.EOFException;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import java.io.FileNotFoundException;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import java.io.IOException;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import java.io.InterruptedIOException;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import java.text.ParseException;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.util.ArrayList;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import java.util.Collections;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import java.util.List;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import java.util.Map;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import java.util.TreeMap;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.conf.Configuration;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileStatus;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileSystem;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.Path;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.HConstants;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.TableDescriptors;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.procedure2.util.StringUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.regionserver.RegionServerServices;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.CommonFSUtils;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.util.FSTableDescriptors;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.util.RecoverLeaseFSUtils;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.slf4j.Logger;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.slf4j.LoggerFactory;<a name="line.65"></a>
-<span class="sourceLineNo">066</span><a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.69"></a>
-<span class="sourceLineNo">070</span><a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;<a name="line.72"></a>
-<span class="sourceLineNo">073</span><a name="line.73"></a>
-<span class="sourceLineNo">074</span>/**<a name="line.74"></a>
-<span class="sourceLineNo">075</span> * This class is responsible for splitting up a bunch of regionserver commit log<a name="line.75"></a>
-<span class="sourceLineNo">076</span> * files that are no longer being written to, into new files, one per region, for<a name="line.76"></a>
-<span class="sourceLineNo">077</span> * recovering data on startup. Delete the old log files when finished.<a name="line.77"></a>
-<span class="sourceLineNo">078</span> */<a name="line.78"></a>
-<span class="sourceLineNo">079</span>@InterfaceAudience.Private<a name="line.79"></a>
-<span class="sourceLineNo">080</span>public class WALSplitter {<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  private static final Logger LOG = LoggerFactory.getLogger(WALSplitter.class);<a name="line.81"></a>
-<span class="sourceLineNo">082</span><a name="line.82"></a>
-<span class="sourceLineNo">083</span>  /** By default we retry errors in splitting, rather than skipping. */<a name="line.83"></a>
-<span class="sourceLineNo">084</span>  public static final boolean SPLIT_SKIP_ERRORS_DEFAULT = false;<a name="line.84"></a>
-<span class="sourceLineNo">085</span><a name="line.85"></a>
-<span class="sourceLineNo">086</span>  // Parameters for split process<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  protected final Path walDir;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  protected final FileSystem walFS;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  protected final Configuration conf;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  final Path rootDir;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  final FileSystem rootFS;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  final RegionServerServices rsServices;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  final TableDescriptors tableDescriptors;<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  // Major subcomponents of the split process.<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  // These are separated into inner classes to make testing easier.<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  OutputSink outputSink;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  private EntryBuffers entryBuffers;<a name="line.98"></a>
-<span class="sourceLineNo">099</span><a name="line.99"></a>
-<span class="sourceLineNo">100</span>  private SplitLogWorkerCoordination splitLogWorkerCoordination;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  private final WALFactory walFactory;<a name="line.101"></a>
-<span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private MonitoredTask status;<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  // For checking the latest flushed sequence id<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  protected final LastSequenceId sequenceIdChecker;<a name="line.106"></a>
+<span class="sourceLineNo">020</span>import static org.apache.hadoop.hbase.wal.WALSplitUtil.finishSplitLogFile;<a name="line.20"></a>
+<span class="sourceLineNo">021</span>import java.io.EOFException;<a name="line.21"></a>
+<span class="sourceLineNo">022</span>import java.io.FileNotFoundException;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import java.io.IOException;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import java.io.InterruptedIOException;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import java.text.ParseException;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import java.util.ArrayList;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import java.util.Collections;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import java.util.List;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import java.util.Map;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.TreeMap;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.conf.Configuration;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FileStatus;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.FileSystem;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.Path;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.HConstants;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.TableDescriptors;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.TableName;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.procedure2.util.StringUtils;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.regionserver.RegionServerServices;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.CommonFSUtils;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.FSTableDescriptors;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.RecoverLeaseFSUtils;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.slf4j.Logger;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.slf4j.LoggerFactory;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;<a name="line.67"></a>
+<span class="sourceLineNo">068</span><a name="line.68"></a>
+<span class="sourceLineNo">069</span>/**<a name="line.69"></a>
+<span class="sourceLineNo">070</span> * Split RegionServer WAL files. Splits the WAL into new files,<a name="line.70"></a>
+<span class="sourceLineNo">071</span> * one per region, to be picked up on Region reopen. Deletes the split WAL when finished.<a name="line.71"></a>
+<span class="sourceLineNo">072</span> * See {@link #split(Path, Path, Path, FileSystem, Configuration, WALFactory)} or<a name="line.72"></a>
+<span class="sourceLineNo">073</span> * {@link #splitLogFile(Path, FileStatus, FileSystem, Configuration, CancelableProgressable,<a name="line.73"></a>
+<span class="sourceLineNo">074</span> *   LastSequenceId, SplitLogWorkerCoordination, WALFactory, RegionServerServices)} for<a name="line.74"></a>
+<span class="sourceLineNo">075</span> *   entry-point.<a name="line.75"></a>
+<span class="sourceLineNo">076</span> */<a name="line.76"></a>
+<span class="sourceLineNo">077</span>@InterfaceAudience.Private<a name="line.77"></a>
+<span class="sourceLineNo">078</span>public class WALSplitter {<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  private static final Logger LOG = LoggerFactory.getLogger(WALSplitter.class);<a name="line.79"></a>
+<span class="sourceLineNo">080</span><a name="line.80"></a>
+<span class="sourceLineNo">081</span>  /** By default we retry errors in splitting, rather than skipping. */<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  public static final boolean SPLIT_SKIP_ERRORS_DEFAULT = false;<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  // Parameters for split process<a name="line.84"></a>
+<span class="sourceLineNo">085</span>  protected final Path walDir;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>  protected final FileSystem walFS;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  protected final Configuration conf;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  final Path rootDir;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>  final FileSystem rootFS;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>  final RegionServerServices rsServices;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>  final TableDescriptors tableDescriptors;<a name="line.91"></a>
+<span class="sourceLineNo">092</span><a name="line.92"></a>
+<span class="sourceLineNo">093</span>  // Major subcomponents of the split process.<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  // These are separated into inner classes to make testing easier.<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  OutputSink outputSink;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  private EntryBuffers entryBuffers;<a name="line.96"></a>
+<span class="sourceLineNo">097</span><a name="line.97"></a>
+<span class="sourceLineNo">098</span>  /**<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   * Coordinator for split log. Used by the zk-based log splitter.<a name="line.99"></a>
+<span class="sourceLineNo">100</span>   * Not used by the procedure v2-based log splitter.<a name="line.100"></a>
+<span class="sourceLineNo">101</span>   */<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  private SplitLogWorkerCoordination splitLogWorkerCoordination;<a name="line.102"></a>
+<span class="sourceLineNo">103</span><a name="line.103"></a>
+<span class="sourceLineNo">104</span>  private final WALFactory walFactory;<a name="line.104"></a>
+<span class="sourceLineNo">105</span><a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private MonitoredTask status;<a name="line.106"></a>
 <span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  // Map encodedRegionName -&gt; lastFlushedSequenceId<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  protected Map&lt;String, Long&gt; lastFlushedSequenceIds = new ConcurrentHashMap&lt;&gt;();<a name="line.109"></a>
+<span class="sourceLineNo">108</span>  // For checking the latest flushed sequence id<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  protected final LastSequenceId sequenceIdChecker;<a name="line.109"></a>
 <span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>  // Map encodedRegionName -&gt; maxSeqIdInStores<a name="line.111"></a>
-<span class="sourceLineNo">112</span>  protected Map&lt;String, Map&lt;byte[], Long&gt;&gt; regionMaxSeqIdInStores = new ConcurrentHashMap&lt;&gt;();<a name="line.112"></a>
+<span class="sourceLineNo">111</span>  // Map encodedRegionName -&gt; lastFlushedSequenceId<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  protected Map&lt;String, Long&gt; lastFlushedSequenceIds = new ConcurrentHashMap&lt;&gt;();<a name="line.112"></a>
 <span class="sourceLineNo">113</span><a name="line.113"></a>
-<span class="sourceLineNo">114</span>  // the file being split currently<a name="line.114"></a>
-<span class="sourceLineNo">115</span>  private FileStatus fileBeingSplit;<a name="line.115"></a>
+<span class="sourceLineNo">114</span>  // Map encodedRegionName -&gt; maxSeqIdInStores<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  protected Map&lt;String, Map&lt;byte[], Long&gt;&gt; regionMaxSeqIdInStores = new ConcurrentHashMap&lt;&gt;();<a name="line.115"></a>
 <span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>  private final String tmpDirName;<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  public final static String SPLIT_WRITER_CREATION_BOUNDED = "hbase.split.writer.creation.bounded";<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public final static String SPLIT_WAL_BUFFER_SIZE = "hbase.regionserver.hlog.splitlog.buffersize";<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  public final static String SPLIT_WAL_WRITER_THREADS =<a name="line.121"></a>
-<span class="sourceLineNo">122</span>      "hbase.regionserver.hlog.splitlog.writer.threads";<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span>  @VisibleForTesting<a name="line.124"></a>
-<span class="sourceLineNo">125</span>  WALSplitter(final WALFactory factory, Configuration conf, Path walDir, FileSystem walFS,<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      Path rootDir, FileSystem rootFS, LastSequenceId idChecker,<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      SplitLogWorkerCoordination splitLogWorkerCoordination, RegionServerServices rsServices) {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    this.conf = HBaseConfiguration.create(conf);<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    String codecClassName =<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        conf.get(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    this.conf.set(HConstants.RPC_CODEC_CONF_KEY, codecClassName);<a name="line.131"></a>
-<span class="sourceLineNo">132</span>    this.walDir = walDir;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    this.walFS = walFS;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    this.rootDir = rootDir;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    this.rootFS = rootFS;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    this.sequenceIdChecker = idChecker;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    this.splitLogWorkerCoordination = splitLogWorkerCoordination;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>    this.rsServices = rsServices;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    if (rsServices != null) {<a name="line.139"></a>
-<span class="sourceLineNo">140</span>      this.tableDescriptors = rsServices.getTableDescriptors();<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    } else {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      this.tableDescriptors = new FSTableDescriptors(rootFS, rootDir, true, true);<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    }<a name="line.143"></a>
-<span class="sourceLineNo">144</span><a name="line.144"></a>
-<span class="sourceLineNo">145</span>    this.walFactory = factory;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>    PipelineController controller = new PipelineController();<a name="line.146"></a>
-<span class="sourceLineNo">147</span>    this.tmpDirName =<a name="line.147"></a>
-<span class="sourceLineNo">148</span>      conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY, HConstants.DEFAULT_TEMPORARY_HDFS_DIRECTORY);<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>    // if we limit the number of writers opened for sinking recovered edits<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    boolean splitWriterCreationBounded = conf.getBoolean(SPLIT_WRITER_CREATION_BOUNDED, false);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    boolean splitToHFile = conf.getBoolean(WAL_SPLIT_TO_HFILE, DEFAULT_WAL_SPLIT_TO_HFILE);<a name="line.153"></a>
-<span class="sourceLineNo">154</span>    long bufferSize = this.conf.getLong(SPLIT_WAL_BUFFER_SIZE, 128 * 1024 * 1024);<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    int numWriterThreads = this.conf.getInt(SPLIT_WAL_WRITER_THREADS, 3);<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>    if (splitToHFile) {<a name="line.157"></a>
-<span class="sourceLineNo">158</span>      entryBuffers = new BoundedEntryBuffers(controller, bufferSize);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      outputSink =<a name="line.159"></a>
-<span class="sourceLineNo">160</span>          new BoundedRecoveredHFilesOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    } else if (splitWriterCreationBounded) {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      entryBuffers = new BoundedEntryBuffers(controller, bufferSize);<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      outputSink =<a name="line.163"></a>
-<span class="sourceLineNo">164</span>          new BoundedRecoveredEditsOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.164"></a>
-<span class="sourceLineNo">165</span>    } else {<a name="line.165"></a>
-<span class="sourceLineNo">166</span>      entryBuffers = new EntryBuffers(controller, bufferSize);<a name="line.166"></a>
-<span class="sourceLineNo">167</span>      outputSink = new RecoveredEditsOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
-<span class="sourceLineNo">169</span>  }<a name="line.169"></a>
-<span class="sourceLineNo">170</span><a name="line.170"></a>
-<span class="sourceLineNo">171</span>  WALFactory getWalFactory(){<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    return this.walFactory;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>  FileStatus getFileBeingSplit() {<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    return fileBeingSplit;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>  }<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  String getTmpDirName() {<a name="line.179"></a>
-<span class="sourceLineNo">180</span>    return this.tmpDirName;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  }<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>  Map&lt;String, Map&lt;byte[], Long&gt;&gt; getRegionMaxSeqIdInStores() {<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return regionMaxSeqIdInStores;<a name="line.184"></a>
+<span class="sourceLineNo">117</span>  // the file being split currently<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  private FileStatus fileBeingSplit;<a name="line.118"></a>
+<span class="sourceLineNo">119</span><a name="line.119"></a>
+<span class="sourceLineNo">120</span>  private final String tmpDirName;<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  /**<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   * Split WAL directly to hfiles instead of into intermediary 'recovered.edits' files.<a name="line.123"></a>
+<span class="sourceLineNo">124</span>   */<a name="line.124"></a>
+<span class="sourceLineNo">125</span>  public static final String WAL_SPLIT_TO_HFILE = "hbase.wal.split.to.hfile";<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  public static final boolean DEFAULT_WAL_SPLIT_TO_HFILE = false;<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  /**<a name="line.128"></a>
+<span class="sourceLineNo">129</span>   * True if we are to run with bounded amount of writers rather than let the count blossom.<a name="line.129"></a>
+<span class="sourceLineNo">130</span>   * Default is 'false'. Does not apply if you have set 'hbase.wal.split.to.hfile' as that<a name="line.130"></a>
+<span class="sourceLineNo">131</span>   * is always bounded. Only applies when you are doing recovery to 'recovered.edits'<a name="line.131"></a>
+<span class="sourceLineNo">132</span>   * files (the old default). Bounded writing tends to have higher throughput.<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   */<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  public final static String SPLIT_WRITER_CREATION_BOUNDED = "hbase.split.writer.creation.bounded";<a name="line.134"></a>
+<span class="sourceLineNo">135</span><a name="line.135"></a>
+<span class="sourceLineNo">136</span>  public final static String SPLIT_WAL_BUFFER_SIZE = "hbase.regionserver.hlog.splitlog.buffersize";<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  public final static String SPLIT_WAL_WRITER_THREADS =<a name="line.137"></a>
+<span class="sourceLineNo">138</span>      "hbase.regionserver.hlog.splitlog.writer.threads";<a name="line.138"></a>
+<span class="sourceLineNo">139</span><a name="line.139"></a>
+<span class="sourceLineNo">140</span>  @VisibleForTesting<a name="line.140"></a>
+<span class="sourceLineNo">141</span>  WALSplitter(final WALFactory factory, Configuration conf, Path walDir, FileSystem walFS,<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      Path rootDir, FileSystem rootFS, LastSequenceId idChecker,<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      SplitLogWorkerCoordination splitLogWorkerCoordination, RegionServerServices rsServices) {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    this.conf = HBaseConfiguration.create(conf);<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    String codecClassName =<a name="line.145"></a>
+<span class="sourceLineNo">146</span>        conf.get(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    this.conf.set(HConstants.RPC_CODEC_CONF_KEY, codecClassName);<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    this.walDir = walDir;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    this.walFS = walFS;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    this.rootDir = rootDir;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    this.rootFS = rootFS;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    this.sequenceIdChecker = idChecker;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    this.splitLogWorkerCoordination = splitLogWorkerCoordination;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    this.rsServices = rsServices;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    if (rsServices != null) {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      this.tableDescriptors = rsServices.getTableDescriptors();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    } else {<a name="line.157"></a>
+<span class="sourceLineNo">158</span>      this.tableDescriptors = new FSTableDescriptors(rootFS, rootDir, true, true);<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>    this.walFactory = factory;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    PipelineController controller = new PipelineController();<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    this.tmpDirName =<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY, HConstants.DEFAULT_TEMPORARY_HDFS_DIRECTORY);<a name="line.164"></a>
+<span class="sourceLineNo">165</span><a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>    // if we limit the number of writers opened for sinking recovered edits<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    boolean splitWriterCreationBounded = conf.getBoolean(SPLIT_WRITER_CREATION_BOUNDED, false);<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    boolean splitToHFile = conf.getBoolean(WAL_SPLIT_TO_HFILE, DEFAULT_WAL_SPLIT_TO_HFILE);<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    long bufferSize = this.conf.getLong(SPLIT_WAL_BUFFER_SIZE, 128 * 1024 * 1024);<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    int numWriterThreads = this.conf.getInt(SPLIT_WAL_WRITER_THREADS, 3);<a name="line.171"></a>
+<span class="sourceLineNo">172</span><a name="line.172"></a>
+<span class="sourceLineNo">173</span>    if (splitToHFile) {<a name="line.173"></a>
+<span class="sourceLineNo">174</span>      entryBuffers = new BoundedEntryBuffers(controller, bufferSize);<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      outputSink =<a name="line.175"></a>
+<span class="sourceLineNo">176</span>          new BoundedRecoveredHFilesOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.176"></a>
+<span class="sourceLineNo">177</span>    } else if (splitWriterCreationBounded) {<a name="line.177"></a>
+<span class="sourceLineNo">178</span>      entryBuffers = new BoundedEntryBuffers(controller, bufferSize);<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      outputSink =<a name="line.179"></a>
+<span class="sourceLineNo">180</span>          new BoundedRecoveredEditsOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.180"></a>
+<span class="sourceLineNo">181</span>    } else {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>      entryBuffers = new EntryBuffers(controller, bufferSize);<a name="line.182"></a>
+<span class="sourceLineNo">183</span>      outputSink = new RecoveredEditsOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    }<a name="line.184"></a>
 <span class="sourceLineNo">185</span>  }<a name="line.185"></a>
 <span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /**<a name="line.187"></a>
-<span class="sourceLineNo">188</span>   * Splits a WAL file into region's recovered-edits directory.<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * This is the main entry point for distributed log splitting from SplitLogWorker.<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * &lt;p&gt;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * If the log file has N regions then N recovered.edits files will be produced.<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * &lt;p&gt;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * @return false if it is interrupted by the progress-able.<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  public static boolean splitLogFile(Path walDir, FileStatus logfile, FileSystem walFS,<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      Configuration conf, CancelableProgressable reporter, LastSequenceId idChecker,<a name="line.196"></a>
-<span class="sourceLineNo">197</span>      SplitLogWorkerCoordination splitLogWorkerCoordination, WALFactory factory,<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      RegionServerServices rsServices) throws IOException {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    Path rootDir = CommonFSUtils.getRootDir(conf);<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    FileSystem rootFS = rootDir.getFileSystem(conf);<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    WALSplitter s = new WALSplitter(factory, conf, walDir, walFS, rootDir, rootFS, idChecker,<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        splitLogWorkerCoordination, rsServices);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    return s.splitLogFile(logfile, reporter);<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  }<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  // A wrapper to split one log folder using the method used by distributed<a name="line.206"></a>
-<span class="sourceLineNo">207</span>  // log splitting. Used by tools and unit tests. It should be package private.<a name="line.207"></a>
-<span class="sourceLineNo">208</span>  // It is public only because TestWALObserver is in a different package,<a name="line.208"></a>
-<span class="sourceLineNo">209</span>  // which uses this method to do log splitting.<a name="line.209"></a>
-<span class="sourceLineNo">210</span>  @VisibleForTesting<a name="line.210"></a>
-<span class="sourceLineNo">211</span>  public static List&lt;Path&gt; split(Path walDir, Path logDir, Path oldLogDir, FileSystem walFS,<a name="line.211"></a>
-<span class="sourceLineNo">212</span>      Configuration conf, final WALFactory factory) throws IOException {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    Path rootDir = CommonFSUtils.getRootDir(conf);<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    FileSystem rootFS = rootDir.getFileSystem(conf);<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    final FileStatus[] logfiles =<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        SplitLogManager.getFileList(conf, Collections.singletonList(logDir), null);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    List&lt;Path&gt; splits = new ArrayList&lt;&gt;();<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    if (ArrayUtils.isNotEmpty(logfiles)) {<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      for (FileStatus logfile : logfiles) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>        WALSplitter s =<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            new WALSplitter(factory, conf, walDir, walFS, rootDir, rootFS, null, null, null);<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        if (s.splitLogFile(logfile, null)) {<a name="line.222"></a>
-<span class="sourceLineNo">223</span>          finishSplitLogFile(walDir, oldLogDir, logfile.getPath(), conf);<a name="line.223"></a>
-<span class="sourceLineNo">224</span>          if (s.outputSink.splits != null) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>            splits.addAll(s.outputSink.splits);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>          }<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        }<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    }<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    if (!walFS.delete(logDir, true)) {<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      throw new IOException("Unable to delete src dir: " + logDir);<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    }<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    return splits;<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  }<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /**<a name="line.236"></a>
-<span class="sourceLineNo">237</span>   * log splitting implementation, splits one log file.<a name="line.237"></a>
-<span class="sourceLineNo">238</span>   * @param logfile should be an actual log file.<a name="line.238"></a>
-<span class="sourceLineNo">239</span>   */<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  @VisibleForTesting<a name="line.240"></a>
-<span class="sourceLineNo">241</span>  boolean splitLogFile(FileStatus logfile, CancelableProgressable reporter) throws IOException {<a name="line.241"></a>
-<span class="sourceLineNo">242</span>    Preconditions.checkState(status == null);<a name="line.242"></a>
-<span class="sourceLineNo">243</span>    Preconditions.checkArgument(logfile.isFile(),<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        "passed in file status is for something other than a regular file.");<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    boolean isCorrupted = false;<a name="line.245"></a>
-<span class="sourceLineNo">246</span>    boolean skipErrors = conf.getBoolean("hbase.hlog.split.skip.errors",<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      SPLIT_SKIP_ERRORS_DEFAULT);<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    int interval = conf.getInt("hbase.splitlog.report.interval.loglines", 1024);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    Path logPath = logfile.getPath();<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    boolean outputSinkStarted = false;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    boolean progressFailed = false;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    int editsCount = 0;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    int editsSkipped = 0;<a name="line.253"></a>
-<span class="sourceLineNo">254</span><a name="line.254"></a>
-<span class="sourceLineNo">255</span>    status = TaskMonitor.get().createStatus(<a name="line.255"></a>
-<span class="sourceLineNo">256</span>          "Splitting log file " + logfile.getPath() + "into a temporary staging area.");<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    Reader logFileReader = null;<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    this.fileBeingSplit = logfile;<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    long startTS = EnvironmentEdgeManager.currentTime();<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    try {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      long logLength = logfile.getLen();<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      LOG.info("Splitting WAL={}, size={} ({} bytes)", logPath, StringUtils.humanSize(logLength),<a name="line.262"></a>
-<span class="sourceLineNo">263</span>          logLength);<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      status.setStatus("Opening log file");<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>        progressFailed = true;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        return false;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      }<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      logFileReader = getReader(logfile, skipErrors, reporter);<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      if (logFileReader == null) {<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        LOG.warn("Nothing to split in WAL={}", logPath);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        return true;<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      }<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      long openCost = EnvironmentEdgeManager.currentTime() - startTS;<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      LOG.info("Open WAL={} cost {} ms", logPath, openCost);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      int numOpenedFilesBeforeReporting = conf.getInt("hbase.splitlog.report.openedfiles", 3);<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      int numOpenedFilesLastCheck = 0;<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      outputSink.setReporter(reporter);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      outputSink.startWriterThreads();<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      outputSinkStarted = true;<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      Entry entry;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      Long lastFlushedSequenceId = -1L;<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      startTS = EnvironmentEdgeManager.currentTime();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>      while ((entry = getNextLogLine(logFileReader, logPath, skipErrors)) != null) {<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        byte[] region = entry.getKey().getEncodedRegionName();<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        String encodedRegionNameAsStr = Bytes.toString(region);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>        lastFlushedSequenceId = lastFlushedSequenceIds.get(encodedRegionNameAsStr);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>        if (lastFlushedSequenceId == null) {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>          if (!(isRegionDirPresentUnderRoot(entry.getKey().getTableName(), encodedRegionNameAsStr))) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>            // The region directory itself is not present in the FS. This indicates that<a name="line.290"></a>
-<span class="sourceLineNo">291</span>            // the region/table is already removed. We can just skip all the edits for this<a name="line.291"></a>
-<span class="sourceLineNo">292</span>            // region. Setting lastFlushedSequenceId as Long.MAX_VALUE so that all edits<a name="line.292"></a>
-<span class="sourceLineNo">293</span>            // will get skipped by the seqId check below.<a name="line.293"></a>
-<span class="sourceLineNo">294</span>            // See more details at https://issues.apache.org/jira/browse/HBASE-24189<a name="line.294"></a>
-<span class="sourceLineNo">295</span>            LOG.info("{} no longer available in the FS. Skipping all edits for this region.",<a name="line.295"></a>
-<span class="sourceLineNo">296</span>                encodedRegionNameAsStr);<a name="line.296"></a>
-<span class="sourceLineNo">297</span>            lastFlushedSequenceId = Long.MAX_VALUE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>          } else {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>            if (sequenceIdChecker != null) {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>              RegionStoreSequenceIds ids = sequenceIdChecker.getLastSequenceId(region);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>              Map&lt;byte[], Long&gt; maxSeqIdInStores = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.301"></a>
-<span class="sourceLineNo">302</span>              for (StoreSequenceId storeSeqId : ids.getStoreSequenceIdList()) {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>                maxSeqIdInStores.put(storeSeqId.getFamilyName().toByteArray(),<a name="line.303"></a>
-<span class="sourceLineNo">304</span>                    storeSeqId.getSequenceId());<a name="line.304"></a>
-<span class="sourceLineNo">305</span>              }<a name="line.305"></a>
-<span class="sourceLineNo">306</span>              regionMaxSeqIdInStores.put(encodedRegionNameAsStr, maxSeqIdInStores);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>              lastFlushedSequenceId = ids.getLastFlushedSequenceId();<a name="line.307"></a>
-<span class="sourceLineNo">308</span>              if (LOG.isDebugEnabled()) {<a name="line.308"></a>
-<span class="sourceLineNo">309</span>                LOG.debug("DLS Last flushed sequenceid for " + encodedRegionNameAsStr + ": "<a name="line.309"></a>
-<span class="sourceLineNo">310</span>                    + TextFormat.shortDebugString(ids));<a name="line.310"></a>
-<span class="sourceLineNo">311</span>              }<a name="line.311"></a>
-<span class="sourceLineNo">312</span>            }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>            if (lastFlushedSequenceId == null) {<a name="line.313"></a>
-<span class="sourceLineNo">314</span>              lastFlushedSequenceId = -1L;<a name="line.314"></a>
-<span class="sourceLineNo">315</span>            }<a name="line.315"></a>
-<span class="sourceLineNo">316</span>          }<a name="line.316"></a>
-<span class="sourceLineNo">317</span>          lastFlushedSequenceIds.put(encodedRegionNameAsStr, lastFlushedSequenceId);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>        }<a name="line.318"></a>
-<span class="sourceLineNo">319</span>        if (lastFlushedSequenceId &gt;= entry.getKey().getSequenceId()) {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>          editsSkipped++;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>          continue;<a name="line.321"></a>
-<span class="sourceLineNo">322</span>        }<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        // Don't send Compaction/Close/Open region events to recovered edit type sinks.<a name="line.323"></a>
-<span class="sourceLineNo">324</span>        if (entry.getEdit().isMetaEdit() &amp;&amp; !outputSink.keepRegionEvent(entry)) {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>          editsSkipped++;<a name="line.325"></a>
-<span class="sourceLineNo">326</span>          continue;<a name="line.326"></a>
-<span class="sourceLineNo">327</span>        }<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        entryBuffers.appendEntry(entry);<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        editsCount++;<a name="line.329"></a>
-<span class="sourceLineNo">330</span>        int moreWritersFromLastCheck = this.getNumOpenWriters() - numOpenedFilesLastCheck;<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        // If sufficient edits have passed, check if we should report progress.<a name="line.331"></a>
-<span class="sourceLineNo">332</span>        if (editsCount % interval == 0<a name="line.332"></a>
-<span class="sourceLineNo">333</span>            || moreWritersFromLastCheck &gt; numOpenedFilesBeforeReporting) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          numOpenedFilesLastCheck = this.getNumOpenWriters();<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          String countsStr = (editsCount - (editsSkipped + outputSink.getTotalSkippedEdits()))<a name="line.335"></a>
-<span class="sourceLineNo">336</span>              + " edits, skipped " + editsSkipped + " edits.";<a name="line.336"></a>
-<span class="sourceLineNo">337</span>          status.setStatus("Split " + countsStr);<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.338"></a>
-<span class="sourceLineNo">339</span>            progressFailed = true;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>            return false;<a name="line.340"></a>
-<span class="sourceLineNo">341</span>          }<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        }<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      }<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    } catch (InterruptedException ie) {<a name="line.344"></a>
-<span class="sourceLineNo">345</span>      IOException iie = new InterruptedIOException();<a name="line.345"></a>
-<span class="sourceLineNo">346</span>      iie.initCause(ie);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      throw iie;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    } catch (CorruptedLogFileException e) {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      LOG.warn("Could not parse, corrupted WAL={}", logPath, e);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>      if (splitLogWorkerCoordination != null) {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>        // Some tests pass in a csm of null.<a name="line.351"></a>
-<span class="sourceLineNo">352</span>        splitLogWorkerCoordination.markCorrupted(walDir, logfile.getPath().getName(), walFS);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      } else {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>        // for tests only<a name="line.354"></a>
-<span class="sourceLineNo">355</span>        ZKSplitLog.markCorrupted(walDir, logfile.getPath().getName(), walFS);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      }<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      isCorrupted = true;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    } catch (IOException e) {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      e = e instanceof RemoteException ? ((RemoteException) e).unwrapRemoteException() : e;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>      throw e;<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    } finally {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>      LOG.debug("Finishing writing output logs and closing down");<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      try {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>        if (null != logFileReader) {<a name="line.364"></a>
-<span class="sourceLineNo">365</span>          logFileReader.close();<a name="line.365"></a>
-<span class="sourceLineNo">366</span>        }<a name="line.366"></a>
-<span class="sourceLineNo">367</span>      } catch (IOException exception) {<a name="line.367"></a>
-<span class="sourceLineNo">368</span>        LOG.warn("Could not close WAL reader", exception);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
-<span class="sourceLineNo">370</span>      try {<a name="line.370"></a>
-<span class="sourceLineNo">371</span>        if (outputSinkStarted) {<a name="line.371"></a>
-<span class="sourceLineNo">372</span>          // Set progress_failed to true as the immediate following statement will reset its value<a name="line.372"></a>
-<span class="sourceLineNo">373</span>          // when close() throws exception, progress_failed has the right value<a name="line.373"></a>
-<span class="sourceLineNo">374</span>          progressFailed = true;<a name="line.374"></a>
-<span class="sourceLineNo">375</span>          progressFailed = outputSink.close() == null;<a name="line.375"></a>
-<span class="sourceLineNo">376</span>        }<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      } finally {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>        long processCost = EnvironmentEdgeManager.currentTime() - startTS;<a name="line.378"></a>
-<span class="sourceLineNo">379</span>        // See if length got updated post lease recovery<a name="line.379"></a>
-<span class="sourceLineNo">380</span>        String msg = "Processed " + editsCount + " edits across " +<a name="line.380"></a>
-<span class="sourceLineNo">381</span>            outputSink.getNumberOfRecoveredRegions() + " regions cost " + processCost +<a name="line.381"></a>
-<span class="sourceLineNo">382</span>            " ms; edits skipped=" + editsSkipped + "; WAL=" + logPath + ", size=" +<a name="line.382"></a>
-<span class="sourceLineNo">383</span>            StringUtils.humanSize(logfile.getLen()) + ", length=" + logfile.getLen() +<a name="line.383"></a>
-<span class="sourceLineNo">384</span>            ", corrupted=" + isCorrupted + ", progress failed=" + progressFailed;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>        LOG.info(msg);<a name="line.385"></a>
-<span class="sourceLineNo">386</span>        status.markComplete(msg);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      }<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    }<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    return !progressFailed;<a name="line.389"></a>
-<span class="sourceLineNo">390</span>  }<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>  private boolean isRegionDirPresentUnderRoot(TableName tableName, String regionName)<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      throws IOException {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    Path regionDirPath = CommonFSUtils.getRegionDir(this.rootDir, tableName, regionName);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    return this.rootFS.exists(regionDirPath);<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * Create a new {@link Reader} for reading logs to split.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private Reader getReader(FileStatus file, boolean skipErrors, CancelableProgressable reporter)<a name="line.401"></a>
-<span class="sourceLineNo">402</span>      throws IOException, CorruptedLogFileException {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>    Path path = file.getPath();<a name="line.403"></a>
-<span class="sourceLineNo">404</span>    long length = file.getLen();<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    Reader in;<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>    // Check for possibly empty file. With appends, currently Hadoop reports a<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    // zero length even if the file has been sync'd. Revisit if HDFS-376 or<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    // HDFS-878 is committed.<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    if (length &lt;= 0) {<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      LOG.warn("File {} might be still open, length is 0", path);<a name="line.411"></a>
-<span class="sourceLineNo">412</span>    }<a name="line.412"></a>
+<span class="sourceLineNo">187</span>  WALFactory getWalFactory(){<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    return this.walFactory;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>  }<a name="line.189"></a>
+<span class="sourceLineNo">190</span><a name="line.190"></a>
+<span class="sourceLineNo">191</span>  FileStatus getFileBeingSplit() {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>    return fileBeingSplit;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>  }<a name="line.193"></a>
+<span class="sourceLineNo">194</span><a name="line.194"></a>
+<span class="sourceLineNo">195</span>  String getTmpDirName() {<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    return this.tmpDirName;<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  }<a name="line.197"></a>
+<span class="sourceLineNo">198</span><a name="line.198"></a>
+<span class="sourceLineNo">199</span>  Map&lt;String, Map&lt;byte[], Long&gt;&gt; getRegionMaxSeqIdInStores() {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    return regionMaxSeqIdInStores;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>  }<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>  /**<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   * Splits a WAL file.<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * @return false if it is interrupted by the progress-able.<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   */<a name="line.206"></a>
+<span class="sourceLineNo">207</span>  public static boolean splitLogFile(Path walDir, FileStatus logfile, FileSystem walFS,<a name="line.207"></a>
+<span class="sourceLineNo">208</span>      Configuration conf, CancelableProgressable reporter, LastSequenceId idChecker,<a name="line.208"></a>
+<span class="sourceLineNo">209</span>      SplitLogWorkerCoordination splitLogWorkerCoordination, WALFactory factory,<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      RegionServerServices rsServices) throws IOException {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    Path rootDir = CommonFSUtils.getRootDir(conf);<a name="line.211"></a>
+<span class="sourceLineNo">212</span>    FileSystem rootFS = rootDir.getFileSystem(conf);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>    WALSplitter s = new WALSplitter(factory, conf, walDir, walFS, rootDir, rootFS, idChecker,<a name="line.213"></a>
+<span class="sourceLineNo">214</span>        splitLogWorkerCoordination, rsServices);<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    return s.splitLogFile(logfile, reporter);<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  }<a name="line.216"></a>
+<span class="sourceLineNo">217</span><a name="line.217"></a>
+<span class="sourceLineNo">218</span>  /**<a name="line.218"></a>
+<span class="sourceLineNo">219</span>   * Split a folder of WAL files. Delete the directory when done.<a name="line.219"></a>
+<span class="sourceLineNo">220</span>   * Used by tools and unit tests. It should be package private.<a name="line.220"></a>
+<span class="sourceLineNo">221</span>   * It is public only because TestWALObserver is in a different package,<a name="line.221"></a>
+<span class="sourceLineNo">222</span>   * which uses this method to do log splitting.<a name="line.222"></a>
+<span class="sourceLineNo">223</span>   * @return List of output files created by the split.<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   */<a name="line.224"></a>
+<span class="sourceLineNo">225</span>  @VisibleForTesting<a name="line.225"></a>
+<span class="sourceLineNo">226</span>  public static List&lt;Path&gt; split(Path walDir, Path logDir, Path oldLogDir, FileSystem walFS,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>      Configuration conf, final WALFactory factory) throws IOException {<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    Path rootDir = CommonFSUtils.getRootDir(conf);<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    FileSystem rootFS = rootDir.getFileSystem(conf);<a name="line.229"></a>
+<span class="sourceLineNo">230</span>    final FileStatus[] logfiles =<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        SplitLogManager.getFileList(conf, Collections.singletonList(logDir), null);<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    List&lt;Path&gt; splits = new ArrayList&lt;&gt;();<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    if (ArrayUtils.isNotEmpty(logfiles)) {<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      for (FileStatus logfile : logfiles) {<a name="line.234"></a>
+<span class="sourceLineNo">235</span>        WALSplitter s =<a name="line.235"></a>
+<span class="sourceLineNo">236</span>            new WALSplitter(factory, conf, walDir, walFS, rootDir, rootFS, null, null, null);<a name="line.236"></a>
+<span class="sourceLineNo">237</span>        if (s.splitLogFile(logfile, null)) {<a name="line.237"></a>
+<span class="sourceLineNo">238</span>          finishSplitLogFile(walDir, oldLogDir, logfile.getPath(), conf);<a name="line.238"></a>
+<span class="sourceLineNo">239</span>          if (s.outputSink.splits != null) {<a name="line.239"></a>
+<span class="sourceLineNo">240</span>            splits.addAll(s.outputSink.splits);<a name="line.240"></a>
+<span class="sourceLineNo">241</span>          }<a name="line.241"></a>
+<span class="sourceLineNo">242</span>        }<a name="line.242"></a>
+<span class="sourceLineNo">243</span>      }<a name="line.243"></a>
+<span class="sourceLineNo">244</span>    }<a name="line.244"></a>
+<span class="sourceLineNo">245</span>    if (!walFS.delete(logDir, true)) {<a name="line.245"></a>
+<span class="sourceLineNo">246</span>      throw new IOException("Unable to delete src dir: " + logDir);<a name="line.246"></a>
+<span class="sourceLineNo">247</span>    }<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    return splits;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  }<a name="line.249"></a>
+<span class="sourceLineNo">250</span><a name="line.250"></a>
+<span class="sourceLineNo">251</span>  /**<a name="line.251"></a>
+<span class="sourceLineNo">252</span>   * WAL splitting implementation, splits one log file.<a name="line.252"></a>
+<span class="sourceLineNo">253</span>   * @param logfile should be an actual log file.<a name="line.253"></a>
+<span class="sourceLineNo">254</span>   */<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  @VisibleForTesting<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  boolean splitLogFile(FileStatus logfile, CancelableProgressable reporter) throws IOException {<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    Preconditions.checkState(status == null);<a name="line.257"></a>
+<span class="sourceLineNo">258</span>    Preconditions.checkArgument(logfile.isFile(),<a name="line.258"></a>
+<span class="sourceLineNo">259</span>        "passed in file status is for something other than a regular file.");<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    boolean isCorrupted = false;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    boolean skipErrors = conf.getBoolean("hbase.hlog.split.skip.errors",<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      SPLIT_SKIP_ERRORS_DEFAULT);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    int interval = conf.getInt("hbase.splitlog.report.interval.loglines", 1024);<a name="line.263"></a>
+<span class="sourceLineNo">264</span>    Path logPath = logfile.getPath();<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    boolean outputSinkStarted = false;<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    boolean progressFailed = false;<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    int editsCount = 0;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    int editsSkipped = 0;<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>    status = TaskMonitor.get().createStatus(<a name="line.270"></a>
+<span class="sourceLineNo">271</span>          "Splitting log file " + logfile.getPath() + "into a temporary staging area.");<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    Reader logFileReader = null;<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    this.fileBeingSplit = logfile;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    long startTS = EnvironmentEdgeManager.currentTime();<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    try {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      long logLength = logfile.getLen();<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      LOG.info("Splitting WAL={}, size={} ({} bytes)", logPath, StringUtils.humanSize(logLength),<a name="line.277"></a>
+<span class="sourceLineNo">278</span>          logLength);<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      status.setStatus("Opening log file");<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>        progressFailed = true;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>        return false;<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      }<a name="line.283"></a>
+<span class="sourceLineNo">284</span>      logFileReader = getReader(logfile, skipErrors, reporter);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      if (logFileReader == null) {<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        LOG.warn("Nothing to split in WAL={}", logPath);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>        return true;<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      }<a name="line.288"></a>
+<span class="sourceLineNo">289</span>      long openCost = EnvironmentEdgeManager.currentTime() - startTS;<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      LOG.info("Open WAL={} cost {} ms", logPath, openCost);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      int numOpenedFilesBeforeReporting = conf.getInt("hbase.splitlog.report.openedfiles", 3);<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      int numOpenedFilesLastCheck = 0;<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      outputSink.setReporter(reporter);<a name="line.293"></a>
+<span class="sourceLineNo">294</span>      outputSink.startWriterThreads();<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      outputSinkStarted = true;<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      Entry entry;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      Long lastFlushedSequenceId = -1L;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>      startTS = EnvironmentEdgeManager.currentTime();<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      while ((entry = getNextLogLine(logFileReader, logPath, skipErrors)) != null) {<a name="line.299"></a>
+<span class="sourceLineNo">300</span>        byte[] region = entry.getKey().getEncodedRegionName();<a name="line.300"></a>
+<span class="sourceLineNo">301</span>        String encodedRegionNameAsStr = Bytes.toString(region);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>        lastFlushedSequenceId = lastFlushedSequenceIds.get(encodedRegionNameAsStr);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        if (lastFlushedSequenceId == null) {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>          if (!(isRegionDirPresentUnderRoot(entry.getKey().getTableName(),<a name="line.304"></a>
+<span class="sourceLineNo">305</span>              encodedRegionNameAsStr))) {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>            // The region directory itself is not present in the FS. This indicates that<a name="line.306"></a>
+<span class="sourceLineNo">307</span>            // the region/table is already removed. We can just skip all the edits for this<a name="line.307"></a>
+<span class="sourceLineNo">308</span>            // region. Setting lastFlushedSequenceId as Long.MAX_VALUE so that all edits<a name="line.308"></a>
+<span class="sourceLineNo">309</span>            // will get skipped by the seqId check below.<a name="line.309"></a>
+<span class="sourceLineNo">310</span>            // See more details at https://issues.apache.org/jira/browse/HBASE-24189<a name="line.310"></a>
+<span class="sourceLineNo">311</span>            LOG.info("{} no longer available in the FS. Skipping all edits for this region.",<a name="line.311"></a>
+<span class="sourceLineNo">312</span>                encodedRegionNameAsStr);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>            lastFlushedSequenceId = Long.MAX_VALUE;<a name="line.313"></a>
+<span class="sourceLineNo">314</span>          } else {<a name="line.314"></a>
+<span class="sourceLineNo">315</span>            if (sequenceIdChecker != null) {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>              RegionStoreSequenceIds ids = sequenceIdChecker.getLastSequenceId(region);<a name="line.316"></a>
+<span class="sourceLineNo">317</span>              Map&lt;byte[], Long&gt; maxSeqIdInStores = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.317"></a>
+<span class="sourceLineNo">318</span>              for (StoreSequenceId storeSeqId : ids.getStoreSequenceIdList()) {<a name="line.318"></a>
+<span class="sourceLineNo">319</span>                maxSeqIdInStores.put(storeSeqId.getFamilyName().toByteArray(),<a name="line.319"></a>
+<span class="sourceLineNo">320</span>                    storeSeqId.getSequenceId());<a name="line.320"></a>
+<span class="sourceLineNo">321</span>              }<a name="line.321"></a>
+<span class="sourceLineNo">322</span>              regionMaxSeqIdInStores.put(encodedRegionNameAsStr, maxSeqIdInStores);<a name="line.322"></a>
+<span class="sourceLineNo">323</span>              lastFlushedSequenceId = ids.getLastFlushedSequenceId();<a name="line.323"></a>
+<span class="sourceLineNo">324</span>              if (LOG.isDebugEnabled()) {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>                LOG.debug("DLS Last flushed sequenceid for " + encodedRegionNameAsStr + ": "<a name="line.325"></a>
+<span class="sourceLineNo">326</span>                    + TextFormat.shortDebugString(ids));<a name="line.326"></a>
+<span class="sourceLineNo">327</span>              }<a name="line.327"></a>
+<span class="sourceLineNo">328</span>            }<a name="line.328"></a>
+<span class="sourceLineNo">329</span>            if (lastFlushedSequenceId == null) {<a name="line.329"></a>
+<span class="sourceLineNo">330</span>              lastFlushedSequenceId = -1L;<a name="line.330"></a>
+<span class="sourceLineNo">331</span>            }<a name="line.331"></a>
+<span class="sourceLineNo">332</span>          }<a name="line.332"></a>
+<span class="sourceLineNo">333</span>          lastFlushedSequenceIds.put(encodedRegionNameAsStr, lastFlushedSequenceId);<a name="line.333"></a>
+<span class="sourceLineNo">334</span>        }<a name="line.334"></a>
+<span class="sourceLineNo">335</span>        if (lastFlushedSequenceId &gt;= entry.getKey().getSequenceId()) {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>          editsSkipped++;<a name="line.336"></a>
+<span class="sourceLineNo">337</span>          continue;<a name="line.337"></a>
+<span class="sourceLineNo">338</span>        }<a name="line.338"></a>
+<span class="sourceLineNo">339</span>        // Don't send Compaction/Close/Open region events to recovered edit type sinks.<a name="line.339"></a>
+<span class="sourceLineNo">340</span>        if (entry.getEdit().isMetaEdit() &amp;&amp; !outputSink.keepRegionEvent(entry)) {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>          editsSkipped++;<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          continue;<a name="line.342"></a>
+<span class="sourceLineNo">343</span>        }<a name="line.343"></a>
+<span class="sourceLineNo">344</span>        entryBuffers.appendEntry(entry);<a name="line.344"></a>
+<span class="sourceLineNo">345</span>        editsCount++;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>        int moreWritersFromLastCheck = this.getNumOpenWriters() - numOpenedFilesLastCheck;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        // If sufficient edits have passed, check if we should report progress.<a name="line.347"></a>
+<span class="sourceLineNo">348</span>        if (editsCount % interval == 0<a name="line.348"></a>
+<span class="sourceLineNo">349</span>            || moreWritersFromLastCheck &gt; numOpenedFilesBeforeReporting) {<a name="line.349"></a>
+<span class="sourceLineNo">350</span>          numOpenedFilesLastCheck = this.getNumOpenWriters();<a name="line.350"></a>
+<span class="sourceLineNo">351</span>          String countsStr = (editsCount - (editsSkipped + outputSink.getTotalSkippedEdits()))<a name="line.351"></a>
+<span class="sourceLineNo">352</span>              + " edits, skipped " + editsSkipped + " edits.";<a name="line.352"></a>
+<span class="sourceLineNo">353</span>          status.setStatus("Split " + countsStr);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>          if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.354"></a>
+<span class="sourceLineNo">355</span>            progressFailed = true;<a name="line.355"></a>
+<span class="sourceLineNo">356</span>            return false;<a name="line.356"></a>
+<span class="sourceLineNo">357</span>          }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>        }<a name="line.358"></a>
+<span class="sourceLineNo">359</span>      }<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    } catch (InterruptedException ie) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      IOException iie = new InterruptedIOException();<a name="line.361"></a>
+<span class="sourceLineNo">362</span>      iie.initCause(ie);<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      throw iie;<a name="line.363"></a>
+<span class="sourceLineNo">364</span>    } catch (CorruptedLogFileException e) {<a name="line.364"></a>
+<span class="sourceLineNo">365</span>      LOG.warn("Could not parse, corrupted WAL={}", logPath, e);<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      if (splitLogWorkerCoordination != null) {<a name="line.366"></a>
+<span class="sourceLineNo">367</span>        // Some tests pass in a csm of null.<a name="line.367"></a>
+<span class="sourceLineNo">368</span>        splitLogWorkerCoordination.markCorrupted(walDir, logfile.getPath().getName(), walFS);<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      } else {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>        // for tests only<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        ZKSplitLog.markCorrupted(walDir, logfile.getPath().getName(), walFS);<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      }<a name="line.372"></a>
+<span class="sourceLineNo">373</span>      isCorrupted = true;<a name="line.373"></a>
+<span class="sourceLineNo">374</span>    } catch (IOException e) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      e = e instanceof RemoteException ? ((RemoteException) e).unwrapRemoteException() : e;<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      throw e;<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    } finally {<a name="line.377"></a>
+<span class="sourceLineNo">378</span>      LOG.debug("Finishing writing output logs and closing down");<a name="line.378"></a>
+<span class="sourceLineNo">379</span>      try {<a name="line.379"></a>
+<span class="sourceLineNo">380</span>        if (null != logFileReader) {<a name="line.380"></a>
+<span class="sourceLineNo">381</span>          logFileReader.close();<a name="line.381"></a>
+<span class="sourceLineNo">382</span>        }<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      } catch (IOException exception) {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>        LOG.warn("Could not close WAL reader", exception);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      try {<a name="line.386"></a>
+<span class="sourceLineNo">387</span>        if (outputSinkStarted) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>          // Set progress_failed to true as the immediate following statement will reset its value<a name="line.388"></a>
+<span class="sourceLineNo">389</span>          // when close() throws exception, progress_failed has the right value<a name="line.389"></a>
+<span class="sourceLineNo">390</span>          progressFailed = true;<a name="line.390"></a>
+<span class="sourceLineNo">391</span>          progressFailed = outputSink.close() == null;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>        }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      } finally {<a name="line.393"></a>
+<span class="sourceLineNo">394</span>        long processCost = EnvironmentEdgeManager.currentTime() - startTS;<a name="line.394"></a>
+<span class="sourceLineNo">395</span>        // See if length got updated post lease recovery<a name="line.395"></a>
+<span class="sourceLineNo">396</span>        String msg = "Processed " + editsCount + " edits across " +<a name="line.396"></a>
+<span class="sourceLineNo">397</span>            outputSink.getNumberOfRecoveredRegions() + " regions cost " + processCost +<a name="line.397"></a>
+<span class="sourceLineNo">398</span>            " ms; edits skipped=" + editsSkipped + "; WAL=" + logPath + ", size=" +<a name="line.398"></a>
+<span class="sourceLineNo">399</span>            StringUtils.humanSize(logfile.getLen()) + ", length=" + logfile.getLen() +<a name="line.399"></a>
+<span class="sourceLineNo">400</span>            ", corrupted=" + isCorrupted + ", progress failed=" + progressFailed;<a name="line.400"></a>
+<span class="sourceLineNo">401</span>        LOG.info(msg);<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        status.markComplete(msg);<a name="line.402"></a>
+<span class="sourceLineNo">403</span>      }<a name="line.403"></a>
+<span class="sourceLineNo">404</span>    }<a name="line.404"></a>
+<span class="sourceLineNo">405</span>    return !progressFailed;<a name="line.405"></a>
+<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
+<span class="sourceLineNo">407</span><a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private boolean isRegionDirPresentUnderRoot(TableName tableName, String regionName)<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      throws IOException {<a name="line.409"></a>
+<span class="sourceLineNo">410</span>    Path regionDirPath = CommonFSUtils.getRegionDir(this.rootDir, tableName, regionName);<a name="line.410"></a>
+<span class="sourceLineNo">411</span>    return this.rootFS.exists(regionDirPath);<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
 <span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>    try {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>      RecoverLeaseFSUtils.recoverFileLease(walFS, path, conf, reporter);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>      try {<a name="line.416"></a>
-<span class="sourceLineNo">417</span>        in = getReader(path, reporter);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      } catch (EOFException e) {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>        if (length &lt;= 0) {<a name="line.419"></a>
-<span class="sourceLineNo">420</span>          // TODO should we ignore an empty, not-last log file if skip.errors<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          // is false? Either way, the caller should decide what to do. E.g.<a name="line.421"></a>
-<span class="sourceLineNo">422</span>          // ignore if this is the last log in sequence.<a name="line.422"></a>
-<span class="sourceLineNo">423</span>          // TODO is this scenario still possible if the log has been<a name="line.423"></a>
-<span class="sourceLineNo">424</span>          // recovered (i.e. closed)<a name="line.424"></a>
-<span class="sourceLineNo">425</span>          LOG.warn("Could not open {} for reading. File is empty", path, e);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        }<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        // EOFException being ignored<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        return null;<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      }<a name="line.429"></a>
-<span class="sourceLineNo">430</span>    } catch (IOException e) {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>      if (e instanceof FileNotFoundException) {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        // A wal file may not exist anymore. Nothing can be recovered so move on<a name="line.432"></a>
-<span class="sourceLineNo">433</span>        LOG.warn("File {} does not exist anymore", path, e);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>        return null;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      }<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      if (!skipErrors || e instanceof InterruptedIOException) {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>        throw e; // Don't mark the file corrupted if interrupted, or not skipErrors<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      }<a name="line.438"></a>
-<span class="sourceLineNo">439</span>      throw new CorruptedLogFileException("skipErrors=true Could not open wal "<a name="line.439"></a>
-<span class="sourceLineNo">440</span>        + path + " ignoring", e);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
-<span class="sourceLineNo">442</span>    return in;<a name="line.442"></a>
-<span class="sourceLineNo">443</span>  }<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>  private Entry getNextLogLine(Reader in, Path path, boolean skipErrors)<a name="line.445"></a>
-<span class="sourceLineNo">446</span>      throws CorruptedLogFileException, IOException {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>    try {<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      return in.next();<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    } catch (EOFException eof) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      // truncated files are expected if a RS crashes (see HBASE-2643)<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      LOG.info("EOF from wal {}. Continuing.", path);<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      return null;<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    } catch (IOException e) {<a name="line.453"></a>
-<span class="sourceLineNo">454</span>      // If the IOE resulted from bad file format,<a name="line.454"></a>
-<span class="sourceLineNo">455</span>      // then this problem is idempotent and retrying won't help<a name="line.455"></a>
-<span class="sourceLineNo">456</span>      if (e.getCause() != null &amp;&amp; (e.getCause() instanceof ParseException<a name="line.456"></a>
-<span class="sourceLineNo">457</span>          || e.getCause() instanceof org.apache.hadoop.fs.ChecksumException)) {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        LOG.warn("Parse exception from wal {}. Continuing", path, e);<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        return null;<a name="line.459"></a>
-<span class="sourceLineNo">460</span>      }<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      if (!skipErrors) {<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        throw e;<a name="line.462"></a>
-<span class="sourceLineNo">463</span>      }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      throw new CorruptedLogFileException("skipErrors=true Ignoring exception"<a name="line.464"></a>
-<span class="sourceLineNo">465</span>        + " while parsing wal " + path + ". Marking as corrupted", e);<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    }<a name="line.466"></a>
-<span class="sourceLineNo">467</span>  }<a name="line.467"></a>
-<span class="sourceLineNo">468</span><a name="line.468"></a>
-<span class="sourceLineNo">469</span>  /**<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   * Create a new {@link WALProvider.Writer} for writing log splits.<a name="line.470"></a>
-<span class="sourceLineNo">471</span>   * @return a new Writer instance, caller should close<a name="line.471"></a>
-<span class="sourceLineNo">472</span>   */<a name="line.472"></a>
-<span class="sourceLineNo">473</span>  protected WALProvider.Writer createWriter(Path logfile) throws IOException {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    return walFactory.createRecoveredEditsWriter(walFS, logfile);<a name="line.474"></a>
-<span class="sourceLineNo">475</span>  }<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>  /**<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * Create a new {@link Reader} for reading logs to split.<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @return new Reader instance, caller should close<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   */<a name="line.480"></a>
-<span class="sourceLineNo">481</span>  protected Reader getReader(Path curLogFile, CancelableProgressable reporter) throws IOException {<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    return walFactory.createReader(walFS, curLogFile, reporter);<a name="line.482"></a>
+<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   * Create a new {@link Reader} for reading logs to split.<a name="line.415"></a>
+<span class="sourceLineNo">416</span>   */<a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private Reader getReader(FileStatus file, boolean skipErrors, CancelableProgressable reporter)<a name="line.417"></a>
+<span class="sourceLineNo">418</span>      throws IOException, CorruptedLogFileException {<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    Path path = file.getPath();<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    long length = file.getLen();<a name="line.420"></a>
+<span class="sourceLineNo">421</span>    Reader in;<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>    // Check for possibly empty file. With appends, currently Hadoop reports a<a name="line.423"></a>
+<span class="sourceLineNo">424</span>    // zero length even if the file has been sync'd. Revisit if HDFS-376 or<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    // HDFS-878 is committed.<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    if (length &lt;= 0) {<a name="line.426"></a>
+<span class="sourceLineNo">427</span>      LOG.warn("File {} might be still open, length is 0", path);<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    }<a name="line.428"></a>
+<span class="sourceLineNo">429</span><a name="line.429"></a>
+<span class="sourceLineNo">430</span>    try {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      RecoverLeaseFSUtils.recoverFileLease(walFS, path, conf, reporter);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>      try {<a name="line.432"></a>
+<span class="sourceLineNo">433</span>        in = getReader(path, reporter);<a name="line.433"></a>
+<span class="sourceLineNo">434</span>      } catch (EOFException e) {<a name="line.434"></a>
+<span class="sourceLineNo">435</span>        if (length &lt;= 0) {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>          // TODO should we ignore an empty, not-last log file if skip.errors<a name="line.436"></a>
+<span class="sourceLineNo">437</span>          // is false? Either way, the caller should decide what to do. E.g.<a name="line.437"></a>
+<span class="sourceLineNo">438</span>          // ignore if this is the last log in sequence.<a name="line.438"></a>
+<span class="sourceLineNo">439</span>          // TODO is this scenario still possible if the log has been<a name="line.439"></a>
+<span class="sourceLineNo">440</span>          // recovered (i.e. closed)<a name="line.440"></a>
+<span class="sourceLineNo">441</span>          LOG.warn("Could not open {} for reading. File is empty", path, e);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        }<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        // EOFException being ignored<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        return null;<a name="line.444"></a>
+<span class="sourceLineNo">445</span>      }<a name="line.445"></a>
+<span class="sourceLineNo">446</span>    } catch (IOException e) {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      if (e instanceof FileNotFoundException) {<a name="line.447"></a>
+<span class="sourceLineNo">448</span>        // A wal file may not exist anymore. Nothing can be recovered so move on<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        LOG.warn("File {} does not exist anymore", path, e);<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return null;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      }<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      if (!skipErrors || e instanceof InterruptedIOException) {<a name="line.452"></a>
+<span class="sourceLineNo">453</span>        throw e; // Don't mark the file corrupted if interrupted, or not skipErrors<a name="line.453"></a>
+<span class="sourceLineNo">454</span>      }<a name="line.454"></a>
+<span class="sourceLineNo">455</span>      throw new CorruptedLogFileException("skipErrors=true Could not open wal "<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        + path + " ignoring", e);<a name="line.456"></a>
+<span class="sourceLineNo">457</span>    }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    return in;<a name="line.458"></a>
+<span class="sourceLineNo">459</span>  }<a name="line.459"></a>
+<span class="sourceLineNo">460</span><a name="line.460"></a>
+<span class="sourceLineNo">461</span>  private Entry getNextLogLine(Reader in, Path path, boolean skipErrors)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      throws CorruptedLogFileException, IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>    try {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      return in.next();<a name="line.464"></a>
+<span class="sourceLineNo">465</span>    } catch (EOFException eof) {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>      // truncated files are expected if a RS crashes (see HBASE-2643)<a name="line.466"></a>
+<span class="sourceLineNo">467</span>      LOG.info("EOF from wal {}. Continuing.", path);<a name="line.467"></a>
+<span class="sourceLineNo">468</span>      return null;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>    } catch (IOException e) {<a name="line.469"></a>
+<span class="sourceLineNo">470</span>      // If the IOE resulted from bad file format,<a name="line.470"></a>
+<span class="sourceLineNo">471</span>      // then this problem is idempotent and retrying won't help<a name="line.471"></a>
+<span class="sourceLineNo">472</span>      if (e.getCause() != null &amp;&amp; (e.getCause() instanceof ParseException<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          || e.getCause() instanceof org.apache.hadoop.fs.ChecksumException)) {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>        LOG.warn("Parse exception from wal {}. Continuing", path, e);<a name="line.474"></a>
+<span class="sourceLineNo">475</span>        return null;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>      }<a name="line.476"></a>
+<span class="sourceLineNo">477</span>      if (!skipErrors) {<a name="line.477"></a>
+<span class="sourceLineNo">478</span>        throw e;<a name="line.478"></a>
+<span class="sourceLineNo">479</span>      }<a name="line.479"></a>
+<span class="sourceLineNo">480</span>      throw new CorruptedLogFileException("skipErrors=true Ignoring exception"<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        + " while parsing wal " + path + ". Marking as corrupted", e);<a name="line.481"></a>
+<span class="sourceLineNo">482</span>    }<a name="line.482"></a>
 <span class="sourceLineNo">483</span>  }<a name="line.483"></a>
 <span class="sourceLineNo">484</span><a name="line.484"></a>
 <span class="sourceLineNo">485</span>  /**<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * Get current open writers<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   */<a name="line.487"></a>
-<span class="sourceLineNo">488</span>  private int getNumOpenWriters() {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    int result = 0;<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    if (this.outputSink != null) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      result += this.outputSink.getNumOpenWriters();<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    }<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    return result;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>  }<a name="line.494"></a>
-<span class="sourceLineNo">495</span><a name="line.495"></a>
-<span class="sourceLineNo">496</span>  /**<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * Contains some methods to control WAL-entries producer / consumer interactions<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   */<a name="line.498"></a>
-<span class="sourceLineNo">499</span>  public static class PipelineController {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    // If an exception is thrown by one of the other threads, it will be<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    // stored here.<a name="line.501"></a>
-<span class="sourceLineNo">502</span>    AtomicReference&lt;Throwable&gt; thrown = new AtomicReference&lt;&gt;();<a name="line.502"></a>
-<span class="sourceLineNo">503</span><a name="line.503"></a>
-<span class="sourceLineNo">504</span>    // Wait/notify for when data has been produced by the writer thread,<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    // consumed by the reader thread, or an exception occurred<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    final Object dataAvailable = new Object();<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>    void writerThreadError(Throwable t) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      thrown.compareAndSet(null, t);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>    }<a name="line.510"></a>
+<span class="sourceLineNo">486</span>   * Create a new {@link WALProvider.Writer} for writing log splits.<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * @return a new Writer instance, caller should close<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   */<a name="line.488"></a>
+<span class="sourceLineNo">489</span>  protected WALProvider.Writer createWriter(Path logfile) throws IOException {<a name="line.489"></a>
+<span class="sourceLineNo">490</span>    return walFactory.createRecoveredEditsWriter(walFS, logfile);<a name="line.490"></a>
+<span class="sourceLineNo">491</span>  }<a name="line.491"></a>
+<span class="sourceLineNo">492</span><a name="line.492"></a>
+<span class="sourceLineNo">493</span>  /**<a name="line.493"></a>
+<span class="sourceLineNo">494</span>   * Create a new {@link Reader} for reading logs to split.<a name="line.494"></a>
+<span class="sourceLineNo">495</span>   * @return new Reader instance, caller should close<a name="line.495"></a>
+<span class="sourceLineNo">496</span>   */<a name="line.496"></a>
+<span class="sourceLineNo">497</span>  protected Reader getReader(Path curLogFile, CancelableProgressable reporter) throws IOException {<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    return walFactory.createReader(walFS, curLogFile, reporter);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>  }<a name="line.499"></a>
+<span class="sourceLineNo">500</span><a name="line.500"></a>
+<span class="sourceLineNo">501</span>  /**<a name="line.501"></a>
+<span class="sourceLineNo">502</span>   * Get current open writers<a name="line.502"></a>
+<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
+<span class="sourceLineNo">504</span>  private int getNumOpenWriters() {<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    int result = 0;<a name="line.505"></a>
+<span class="sourceLineNo">506</span>    if (this.outputSink != null) {<a name="line.506"></a>
+<span class="sourceLineNo">507</span>      result += this.outputSink.getNumOpenWriters();<a name="line.507"></a>
+<span class="sourceLineNo">508</span>    }<a name="line.508"></a>
+<span class="sourceLineNo">509</span>    return result;<a name="line.509"></a>
+<span class="sourceLineNo">510</span>  }<a name="line.510"></a>
 <span class="sourceLineNo">511</span><a name="line.511"></a>
-<span class="sourceLineNo">512</span>    /**<a name="line.512"></a>
-<span class="sourceLineNo">513</span>     * Check for errors in the writer threads. If any is found, rethrow it.<a name="line.513"></a>
-<span class="sourceLineNo">514</span>     */<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    void checkForErrors() throws IOException {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      Throwable thrown = this.thrown.get();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      if (thrown == null) {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>        return;<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>      if (thrown instanceof IOException) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>        throw new IOException(thrown);<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      } else {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>        throw new RuntimeException(thrown);<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      }<a name="line.524"></a>
-<span class="sourceLineNo">525</span>    }<a name="line.525"></a>
-<span class="sourceLineNo">526</span>  }<a name="line.526"></a>
+<span class="sourceLineNo">512</span>  /**<a name="line.512"></a>
+<span class="sourceLineNo">513</span>   * Contains some methods to control WAL-entries producer / consumer interactions<a name="line.513"></a>
+<span class="sourceLineNo">514</span>   */<a name="line.514"></a>
+<span class="sourceLineNo">515</span>  public static class PipelineController {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    // If an exception is thrown by one of the other threads, it will be<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    // stored here.<a name="line.517"></a>
+<span class="sourceLineNo">518</span>    AtomicReference&lt;Throwable&gt; thrown = new AtomicReference&lt;&gt;();<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>    // Wait/notify for when data has been produced by the writer thread,<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    // consumed by the reader thread, or an exception occurred<a name="line.521"></a>
+<span class="sourceLineNo">522</span>    final Object dataAvailable = new Object();<a name="line.522"></a>
+<span class="sourceLineNo">523</span><a name="line.523"></a>
+<span class="sourceLineNo">524</span>    void writerThreadError(Throwable t) {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>      thrown.compareAndSet(null, t);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>    }<a name="line.526"></a>
 <span class="sourceLineNo">527</span><a name="line.527"></a>
-<span class="sourceLineNo">528</span>  static class CorruptedLogFileException extends Exception {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    private static final long serialVersionUID = 1L;<a name="line.529"></a>
-<span class="sourceLineNo">530</span><a name="line.530"></a>
-<span class="sourceLineNo">531</span>    CorruptedLogFileException(String s) {<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      super(s);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    }<a name="line.533"></a>
-<span class="sourceLineNo">534</span><a name="line.534"></a>
-<span class="sourceLineNo">535</span>    /**<a name="line.535"></a>
-<span class="sourceLineNo">536</span>     * CorruptedLogFileException with cause<a name="line.536"></a>
-<span class="sourceLineNo">537</span>     *<a name="line.537"></a>
-<span class="sourceLineNo">538</span>     * @param message the message for this exception<a name="line.538"></a>
-<span class="sourceLineNo">539</span>     * @param cause the cause for this exception<a name="line.539"></a>
-<span class="sourceLineNo">540</span>     */<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    CorruptedLogFileException(String message, Throwable cause) {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      super(message, cause);<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>  }<a name="line.544"></a>
-<span class="sourceLineNo">545</span>}<a name="line.545"></a>
+<span class="sourceLineNo">528</span>    /**<a name="line.528"></a>
+<span class="sourceLineNo">529</span>     * Check for errors in the writer threads. If any is found, rethrow it.<a name="line.529"></a>
+<span class="sourceLineNo">530</span>     */<a name="line.530"></a>
+<span class="sourceLineNo">531</span>    void checkForErrors() throws IOException {<a name="line.531"></a>
+<span class="sourceLineNo">532</span>      Throwable thrown = this.thrown.get();<a name="line.532"></a>
+<span class="sourceLineNo">533</span>      if (thrown == null) {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>        return;<a name="line.534"></a>
+<span class="sourceLineNo">535</span>      }<a name="line.535"></a>
+<span class="sourceLineNo">536</span>      if (thrown instanceof IOException) {<a name="line.536"></a>
+<span class="sourceLineNo">537</span>        throw new IOException(thrown);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>      } else {<a name="line.538"></a>
+<span class="sourceLineNo">539</span>        throw new RuntimeException(thrown);<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      }<a name="line.540"></a>
+<span class="sourceLineNo">541</span>    }<a name="line.541"></a>
+<span class="sourceLineNo">542</span>  }<a name="line.542"></a>
+<span class="sourceLineNo">543</span><a name="line.543"></a>
+<span class="sourceLineNo">544</span>  static class CorruptedLogFileException extends Exception {<a name="line.544"></a>
+<span class="sourceLineNo">545</span>    private static final long serialVersionUID = 1L;<a name="line.545"></a>
+<span class="sourceLineNo">546</span><a name="line.546"></a>
+<span class="sourceLineNo">547</span>    CorruptedLogFileException(String s) {<a name="line.547"></a>
+<span class="sourceLineNo">548</span>      super(s);<a name="line.548"></a>
+<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
+<span class="sourceLineNo">550</span><a name="line.550"></a>
+<span class="sourceLineNo">551</span>    /**<a name="line.551"></a>
+<span class="sourceLineNo">552</span>     * CorruptedLogFileException with cause<a name="line.552"></a>
+<span class="sourceLineNo">553</span>     *<a name="line.553"></a>
+<span class="sourceLineNo">554</span>     * @param message the message for this exception<a name="line.554"></a>
+<span class="sourceLineNo">555</span>     * @param cause the cause for this exception<a name="line.555"></a>
+<span class="sourceLineNo">556</span>     */<a name="line.556"></a>
+<span class="sourceLineNo">557</span>    CorruptedLogFileException(String message, Throwable cause) {<a name="line.557"></a>
+<span class="sourceLineNo">558</span>      super(message, cause);<a name="line.558"></a>
+<span class="sourceLineNo">559</span>    }<a name="line.559"></a>
+<span class="sourceLineNo">560</span>  }<a name="line.560"></a>
+<span class="sourceLineNo">561</span>}<a name="line.561"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.html
index 8b6211e..e7488c7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.html
@@ -6,7 +6,7 @@
 </head>
 <body>
 <div class="sourceContainer">
-<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a>
+<pre><span class="sourceLineNo">001</span>/*<a name="line.1"></a>
 <span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a>
 <span class="sourceLineNo">003</span> * or more contributor license agreements.  See the NOTICE file<a name="line.3"></a>
 <span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a>
@@ -25,532 +25,548 @@
 <span class="sourceLineNo">017</span> */<a name="line.17"></a>
 <span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.wal;<a name="line.18"></a>
 <span class="sourceLineNo">019</span><a name="line.19"></a>
-<span class="sourceLineNo">020</span>import static org.apache.hadoop.hbase.wal.BoundedRecoveredHFilesOutputSink.DEFAULT_WAL_SPLIT_TO_HFILE;<a name="line.20"></a>
-<span class="sourceLineNo">021</span>import static org.apache.hadoop.hbase.wal.BoundedRecoveredHFilesOutputSink.WAL_SPLIT_TO_HFILE;<a name="line.21"></a>
-<span class="sourceLineNo">022</span>import static org.apache.hadoop.hbase.wal.WALSplitUtil.finishSplitLogFile;<a name="line.22"></a>
-<span class="sourceLineNo">023</span><a name="line.23"></a>
-<span class="sourceLineNo">024</span>import java.io.EOFException;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import java.io.FileNotFoundException;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import java.io.IOException;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import java.io.InterruptedIOException;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import java.text.ParseException;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.util.ArrayList;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import java.util.Collections;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import java.util.List;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import java.util.Map;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import java.util.TreeMap;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.conf.Configuration;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileStatus;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileSystem;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.Path;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.HConstants;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.TableDescriptors;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.procedure2.util.StringUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.regionserver.RegionServerServices;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.CommonFSUtils;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.util.FSTableDescriptors;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.util.RecoverLeaseFSUtils;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.slf4j.Logger;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.slf4j.LoggerFactory;<a name="line.65"></a>
-<span class="sourceLineNo">066</span><a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.69"></a>
-<span class="sourceLineNo">070</span><a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;<a name="line.72"></a>
-<span class="sourceLineNo">073</span><a name="line.73"></a>
-<span class="sourceLineNo">074</span>/**<a name="line.74"></a>
-<span class="sourceLineNo">075</span> * This class is responsible for splitting up a bunch of regionserver commit log<a name="line.75"></a>
-<span class="sourceLineNo">076</span> * files that are no longer being written to, into new files, one per region, for<a name="line.76"></a>
-<span class="sourceLineNo">077</span> * recovering data on startup. Delete the old log files when finished.<a name="line.77"></a>
-<span class="sourceLineNo">078</span> */<a name="line.78"></a>
-<span class="sourceLineNo">079</span>@InterfaceAudience.Private<a name="line.79"></a>
-<span class="sourceLineNo">080</span>public class WALSplitter {<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  private static final Logger LOG = LoggerFactory.getLogger(WALSplitter.class);<a name="line.81"></a>
-<span class="sourceLineNo">082</span><a name="line.82"></a>
-<span class="sourceLineNo">083</span>  /** By default we retry errors in splitting, rather than skipping. */<a name="line.83"></a>
-<span class="sourceLineNo">084</span>  public static final boolean SPLIT_SKIP_ERRORS_DEFAULT = false;<a name="line.84"></a>
-<span class="sourceLineNo">085</span><a name="line.85"></a>
-<span class="sourceLineNo">086</span>  // Parameters for split process<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  protected final Path walDir;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  protected final FileSystem walFS;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  protected final Configuration conf;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  final Path rootDir;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  final FileSystem rootFS;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  final RegionServerServices rsServices;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  final TableDescriptors tableDescriptors;<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  // Major subcomponents of the split process.<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  // These are separated into inner classes to make testing easier.<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  OutputSink outputSink;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  private EntryBuffers entryBuffers;<a name="line.98"></a>
-<span class="sourceLineNo">099</span><a name="line.99"></a>
-<span class="sourceLineNo">100</span>  private SplitLogWorkerCoordination splitLogWorkerCoordination;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  private final WALFactory walFactory;<a name="line.101"></a>
-<span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private MonitoredTask status;<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  // For checking the latest flushed sequence id<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  protected final LastSequenceId sequenceIdChecker;<a name="line.106"></a>
+<span class="sourceLineNo">020</span>import static org.apache.hadoop.hbase.wal.WALSplitUtil.finishSplitLogFile;<a name="line.20"></a>
+<span class="sourceLineNo">021</span>import java.io.EOFException;<a name="line.21"></a>
+<span class="sourceLineNo">022</span>import java.io.FileNotFoundException;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import java.io.IOException;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import java.io.InterruptedIOException;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import java.text.ParseException;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import java.util.ArrayList;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import java.util.Collections;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import java.util.List;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import java.util.Map;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.TreeMap;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.conf.Configuration;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FileStatus;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.FileSystem;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.Path;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.HConstants;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.TableDescriptors;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.TableName;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.procedure2.util.StringUtils;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.regionserver.RegionServerServices;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.CommonFSUtils;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.util.FSTableDescriptors;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.util.RecoverLeaseFSUtils;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.slf4j.Logger;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.slf4j.LoggerFactory;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;<a name="line.67"></a>
+<span class="sourceLineNo">068</span><a name="line.68"></a>
+<span class="sourceLineNo">069</span>/**<a name="line.69"></a>
+<span class="sourceLineNo">070</span> * Split RegionServer WAL files. Splits the WAL into new files,<a name="line.70"></a>
+<span class="sourceLineNo">071</span> * one per region, to be picked up on Region reopen. Deletes the split WAL when finished.<a name="line.71"></a>
+<span class="sourceLineNo">072</span> * See {@link #split(Path, Path, Path, FileSystem, Configuration, WALFactory)} or<a name="line.72"></a>
+<span class="sourceLineNo">073</span> * {@link #splitLogFile(Path, FileStatus, FileSystem, Configuration, CancelableProgressable,<a name="line.73"></a>
+<span class="sourceLineNo">074</span> *   LastSequenceId, SplitLogWorkerCoordination, WALFactory, RegionServerServices)} for<a name="line.74"></a>
+<span class="sourceLineNo">075</span> *   entry-point.<a name="line.75"></a>
+<span class="sourceLineNo">076</span> */<a name="line.76"></a>
+<span class="sourceLineNo">077</span>@InterfaceAudience.Private<a name="line.77"></a>
+<span class="sourceLineNo">078</span>public class WALSplitter {<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  private static final Logger LOG = LoggerFactory.getLogger(WALSplitter.class);<a name="line.79"></a>
+<span class="sourceLineNo">080</span><a name="line.80"></a>
+<span class="sourceLineNo">081</span>  /** By default we retry errors in splitting, rather than skipping. */<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  public static final boolean SPLIT_SKIP_ERRORS_DEFAULT = false;<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  // Parameters for split process<a name="line.84"></a>
+<span class="sourceLineNo">085</span>  protected final Path walDir;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>  protected final FileSystem walFS;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  protected final Configuration conf;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  final Path rootDir;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>  final FileSystem rootFS;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>  final RegionServerServices rsServices;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>  final TableDescriptors tableDescriptors;<a name="line.91"></a>
+<span class="sourceLineNo">092</span><a name="line.92"></a>
+<span class="sourceLineNo">093</span>  // Major subcomponents of the split process.<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  // These are separated into inner classes to make testing easier.<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  OutputSink outputSink;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  private EntryBuffers entryBuffers;<a name="line.96"></a>
+<span class="sourceLineNo">097</span><a name="line.97"></a>
+<span class="sourceLineNo">098</span>  /**<a name="line.98"></a>
+<span class="sourceLineNo">099</span>   * Coordinator for split log. Used by the zk-based log splitter.<a name="line.99"></a>
+<span class="sourceLineNo">100</span>   * Not used by the procedure v2-based log splitter.<a name="line.100"></a>
+<span class="sourceLineNo">101</span>   */<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  private SplitLogWorkerCoordination splitLogWorkerCoordination;<a name="line.102"></a>
+<span class="sourceLineNo">103</span><a name="line.103"></a>
+<span class="sourceLineNo">104</span>  private final WALFactory walFactory;<a name="line.104"></a>
+<span class="sourceLineNo">105</span><a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private MonitoredTask status;<a name="line.106"></a>
 <span class="sourceLineNo">107</span><a name="line.107"></a>
-<span class="sourceLineNo">108</span>  // Map encodedRegionName -&gt; lastFlushedSequenceId<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  protected Map&lt;String, Long&gt; lastFlushedSequenceIds = new ConcurrentHashMap&lt;&gt;();<a name="line.109"></a>
+<span class="sourceLineNo">108</span>  // For checking the latest flushed sequence id<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  protected final LastSequenceId sequenceIdChecker;<a name="line.109"></a>
 <span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>  // Map encodedRegionName -&gt; maxSeqIdInStores<a name="line.111"></a>
-<span class="sourceLineNo">112</span>  protected Map&lt;String, Map&lt;byte[], Long&gt;&gt; regionMaxSeqIdInStores = new ConcurrentHashMap&lt;&gt;();<a name="line.112"></a>
+<span class="sourceLineNo">111</span>  // Map encodedRegionName -&gt; lastFlushedSequenceId<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  protected Map&lt;String, Long&gt; lastFlushedSequenceIds = new ConcurrentHashMap&lt;&gt;();<a name="line.112"></a>
 <span class="sourceLineNo">113</span><a name="line.113"></a>
-<span class="sourceLineNo">114</span>  // the file being split currently<a name="line.114"></a>
-<span class="sourceLineNo">115</span>  private FileStatus fileBeingSplit;<a name="line.115"></a>
+<span class="sourceLineNo">114</span>  // Map encodedRegionName -&gt; maxSeqIdInStores<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  protected Map&lt;String, Map&lt;byte[], Long&gt;&gt; regionMaxSeqIdInStores = new ConcurrentHashMap&lt;&gt;();<a name="line.115"></a>
 <span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>  private final String tmpDirName;<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  public final static String SPLIT_WRITER_CREATION_BOUNDED = "hbase.split.writer.creation.bounded";<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public final static String SPLIT_WAL_BUFFER_SIZE = "hbase.regionserver.hlog.splitlog.buffersize";<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  public final static String SPLIT_WAL_WRITER_THREADS =<a name="line.121"></a>
-<span class="sourceLineNo">122</span>      "hbase.regionserver.hlog.splitlog.writer.threads";<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span>  @VisibleForTesting<a name="line.124"></a>
-<span class="sourceLineNo">125</span>  WALSplitter(final WALFactory factory, Configuration conf, Path walDir, FileSystem walFS,<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      Path rootDir, FileSystem rootFS, LastSequenceId idChecker,<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      SplitLogWorkerCoordination splitLogWorkerCoordination, RegionServerServices rsServices) {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    this.conf = HBaseConfiguration.create(conf);<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    String codecClassName =<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        conf.get(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    this.conf.set(HConstants.RPC_CODEC_CONF_KEY, codecClassName);<a name="line.131"></a>
-<span class="sourceLineNo">132</span>    this.walDir = walDir;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    this.walFS = walFS;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    this.rootDir = rootDir;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    this.rootFS = rootFS;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    this.sequenceIdChecker = idChecker;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    this.splitLogWorkerCoordination = splitLogWorkerCoordination;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>    this.rsServices = rsServices;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    if (rsServices != null) {<a name="line.139"></a>
-<span class="sourceLineNo">140</span>      this.tableDescriptors = rsServices.getTableDescriptors();<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    } else {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      this.tableDescriptors = new FSTableDescriptors(rootFS, rootDir, true, true);<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    }<a name="line.143"></a>
-<span class="sourceLineNo">144</span><a name="line.144"></a>
-<span class="sourceLineNo">145</span>    this.walFactory = factory;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>    PipelineController controller = new PipelineController();<a name="line.146"></a>
-<span class="sourceLineNo">147</span>    this.tmpDirName =<a name="line.147"></a>
-<span class="sourceLineNo">148</span>      conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY, HConstants.DEFAULT_TEMPORARY_HDFS_DIRECTORY);<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>    // if we limit the number of writers opened for sinking recovered edits<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    boolean splitWriterCreationBounded = conf.getBoolean(SPLIT_WRITER_CREATION_BOUNDED, false);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    boolean splitToHFile = conf.getBoolean(WAL_SPLIT_TO_HFILE, DEFAULT_WAL_SPLIT_TO_HFILE);<a name="line.153"></a>
-<span class="sourceLineNo">154</span>    long bufferSize = this.conf.getLong(SPLIT_WAL_BUFFER_SIZE, 128 * 1024 * 1024);<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    int numWriterThreads = this.conf.getInt(SPLIT_WAL_WRITER_THREADS, 3);<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>    if (splitToHFile) {<a name="line.157"></a>
-<span class="sourceLineNo">158</span>      entryBuffers = new BoundedEntryBuffers(controller, bufferSize);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      outputSink =<a name="line.159"></a>
-<span class="sourceLineNo">160</span>          new BoundedRecoveredHFilesOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    } else if (splitWriterCreationBounded) {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      entryBuffers = new BoundedEntryBuffers(controller, bufferSize);<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      outputSink =<a name="line.163"></a>
-<span class="sourceLineNo">164</span>          new BoundedRecoveredEditsOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.164"></a>
-<span class="sourceLineNo">165</span>    } else {<a name="line.165"></a>
-<span class="sourceLineNo">166</span>      entryBuffers = new EntryBuffers(controller, bufferSize);<a name="line.166"></a>
-<span class="sourceLineNo">167</span>      outputSink = new RecoveredEditsOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
-<span class="sourceLineNo">169</span>  }<a name="line.169"></a>
-<span class="sourceLineNo">170</span><a name="line.170"></a>
-<span class="sourceLineNo">171</span>  WALFactory getWalFactory(){<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    return this.walFactory;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>  FileStatus getFileBeingSplit() {<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    return fileBeingSplit;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>  }<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  String getTmpDirName() {<a name="line.179"></a>
-<span class="sourceLineNo">180</span>    return this.tmpDirName;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  }<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>  Map&lt;String, Map&lt;byte[], Long&gt;&gt; getRegionMaxSeqIdInStores() {<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return regionMaxSeqIdInStores;<a name="line.184"></a>
+<span class="sourceLineNo">117</span>  // the file being split currently<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  private FileStatus fileBeingSplit;<a name="line.118"></a>
+<span class="sourceLineNo">119</span><a name="line.119"></a>
+<span class="sourceLineNo">120</span>  private final String tmpDirName;<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  /**<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   * Split WAL directly to hfiles instead of into intermediary 'recovered.edits' files.<a name="line.123"></a>
+<span class="sourceLineNo">124</span>   */<a name="line.124"></a>
+<span class="sourceLineNo">125</span>  public static final String WAL_SPLIT_TO_HFILE = "hbase.wal.split.to.hfile";<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  public static final boolean DEFAULT_WAL_SPLIT_TO_HFILE = false;<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  /**<a name="line.128"></a>
+<span class="sourceLineNo">129</span>   * True if we are to run with bounded amount of writers rather than let the count blossom.<a name="line.129"></a>
+<span class="sourceLineNo">130</span>   * Default is 'false'. Does not apply if you have set 'hbase.wal.split.to.hfile' as that<a name="line.130"></a>
+<span class="sourceLineNo">131</span>   * is always bounded. Only applies when you are doing recovery to 'recovered.edits'<a name="line.131"></a>
+<span class="sourceLineNo">132</span>   * files (the old default). Bounded writing tends to have higher throughput.<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   */<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  public final static String SPLIT_WRITER_CREATION_BOUNDED = "hbase.split.writer.creation.bounded";<a name="line.134"></a>
+<span class="sourceLineNo">135</span><a name="line.135"></a>
+<span class="sourceLineNo">136</span>  public final static String SPLIT_WAL_BUFFER_SIZE = "hbase.regionserver.hlog.splitlog.buffersize";<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  public final static String SPLIT_WAL_WRITER_THREADS =<a name="line.137"></a>
+<span class="sourceLineNo">138</span>      "hbase.regionserver.hlog.splitlog.writer.threads";<a name="line.138"></a>
+<span class="sourceLineNo">139</span><a name="line.139"></a>
+<span class="sourceLineNo">140</span>  @VisibleForTesting<a name="line.140"></a>
+<span class="sourceLineNo">141</span>  WALSplitter(final WALFactory factory, Configuration conf, Path walDir, FileSystem walFS,<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      Path rootDir, FileSystem rootFS, LastSequenceId idChecker,<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      SplitLogWorkerCoordination splitLogWorkerCoordination, RegionServerServices rsServices) {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    this.conf = HBaseConfiguration.create(conf);<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    String codecClassName =<a name="line.145"></a>
+<span class="sourceLineNo">146</span>        conf.get(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    this.conf.set(HConstants.RPC_CODEC_CONF_KEY, codecClassName);<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    this.walDir = walDir;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    this.walFS = walFS;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    this.rootDir = rootDir;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    this.rootFS = rootFS;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    this.sequenceIdChecker = idChecker;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    this.splitLogWorkerCoordination = splitLogWorkerCoordination;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    this.rsServices = rsServices;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    if (rsServices != null) {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      this.tableDescriptors = rsServices.getTableDescriptors();<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    } else {<a name="line.157"></a>
+<span class="sourceLineNo">158</span>      this.tableDescriptors = new FSTableDescriptors(rootFS, rootDir, true, true);<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>    this.walFactory = factory;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    PipelineController controller = new PipelineController();<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    this.tmpDirName =<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY, HConstants.DEFAULT_TEMPORARY_HDFS_DIRECTORY);<a name="line.164"></a>
+<span class="sourceLineNo">165</span><a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>    // if we limit the number of writers opened for sinking recovered edits<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    boolean splitWriterCreationBounded = conf.getBoolean(SPLIT_WRITER_CREATION_BOUNDED, false);<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    boolean splitToHFile = conf.getBoolean(WAL_SPLIT_TO_HFILE, DEFAULT_WAL_SPLIT_TO_HFILE);<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    long bufferSize = this.conf.getLong(SPLIT_WAL_BUFFER_SIZE, 128 * 1024 * 1024);<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    int numWriterThreads = this.conf.getInt(SPLIT_WAL_WRITER_THREADS, 3);<a name="line.171"></a>
+<span class="sourceLineNo">172</span><a name="line.172"></a>
+<span class="sourceLineNo">173</span>    if (splitToHFile) {<a name="line.173"></a>
+<span class="sourceLineNo">174</span>      entryBuffers = new BoundedEntryBuffers(controller, bufferSize);<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      outputSink =<a name="line.175"></a>
+<span class="sourceLineNo">176</span>          new BoundedRecoveredHFilesOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.176"></a>
+<span class="sourceLineNo">177</span>    } else if (splitWriterCreationBounded) {<a name="line.177"></a>
+<span class="sourceLineNo">178</span>      entryBuffers = new BoundedEntryBuffers(controller, bufferSize);<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      outputSink =<a name="line.179"></a>
+<span class="sourceLineNo">180</span>          new BoundedRecoveredEditsOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.180"></a>
+<span class="sourceLineNo">181</span>    } else {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>      entryBuffers = new EntryBuffers(controller, bufferSize);<a name="line.182"></a>
+<span class="sourceLineNo">183</span>      outputSink = new RecoveredEditsOutputSink(this, controller, entryBuffers, numWriterThreads);<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    }<a name="line.184"></a>
 <span class="sourceLineNo">185</span>  }<a name="line.185"></a>
 <span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  /**<a name="line.187"></a>
-<span class="sourceLineNo">188</span>   * Splits a WAL file into region's recovered-edits directory.<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   * This is the main entry point for distributed log splitting from SplitLogWorker.<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * &lt;p&gt;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * If the log file has N regions then N recovered.edits files will be produced.<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * &lt;p&gt;<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   * @return false if it is interrupted by the progress-able.<a name="line.193"></a>
-<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  public static boolean splitLogFile(Path walDir, FileStatus logfile, FileSystem walFS,<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      Configuration conf, CancelableProgressable reporter, LastSequenceId idChecker,<a name="line.196"></a>
-<span class="sourceLineNo">197</span>      SplitLogWorkerCoordination splitLogWorkerCoordination, WALFactory factory,<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      RegionServerServices rsServices) throws IOException {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    Path rootDir = CommonFSUtils.getRootDir(conf);<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    FileSystem rootFS = rootDir.getFileSystem(conf);<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    WALSplitter s = new WALSplitter(factory, conf, walDir, walFS, rootDir, rootFS, idChecker,<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        splitLogWorkerCoordination, rsServices);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    return s.splitLogFile(logfile, reporter);<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  }<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  // A wrapper to split one log folder using the method used by distributed<a name="line.206"></a>
-<span class="sourceLineNo">207</span>  // log splitting. Used by tools and unit tests. It should be package private.<a name="line.207"></a>
-<span class="sourceLineNo">208</span>  // It is public only because TestWALObserver is in a different package,<a name="line.208"></a>
-<span class="sourceLineNo">209</span>  // which uses this method to do log splitting.<a name="line.209"></a>
-<span class="sourceLineNo">210</span>  @VisibleForTesting<a name="line.210"></a>
-<span class="sourceLineNo">211</span>  public static List&lt;Path&gt; split(Path walDir, Path logDir, Path oldLogDir, FileSystem walFS,<a name="line.211"></a>
-<span class="sourceLineNo">212</span>      Configuration conf, final WALFactory factory) throws IOException {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    Path rootDir = CommonFSUtils.getRootDir(conf);<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    FileSystem rootFS = rootDir.getFileSystem(conf);<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    final FileStatus[] logfiles =<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        SplitLogManager.getFileList(conf, Collections.singletonList(logDir), null);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    List&lt;Path&gt; splits = new ArrayList&lt;&gt;();<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    if (ArrayUtils.isNotEmpty(logfiles)) {<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      for (FileStatus logfile : logfiles) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>        WALSplitter s =<a name="line.220"></a>
-<span class="sourceLineNo">221</span>            new WALSplitter(factory, conf, walDir, walFS, rootDir, rootFS, null, null, null);<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        if (s.splitLogFile(logfile, null)) {<a name="line.222"></a>
-<span class="sourceLineNo">223</span>          finishSplitLogFile(walDir, oldLogDir, logfile.getPath(), conf);<a name="line.223"></a>
-<span class="sourceLineNo">224</span>          if (s.outputSink.splits != null) {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>            splits.addAll(s.outputSink.splits);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>          }<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        }<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    }<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    if (!walFS.delete(logDir, true)) {<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      throw new IOException("Unable to delete src dir: " + logDir);<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    }<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    return splits;<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  }<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  /**<a name="line.236"></a>
-<span class="sourceLineNo">237</span>   * log splitting implementation, splits one log file.<a name="line.237"></a>
-<span class="sourceLineNo">238</span>   * @param logfile should be an actual log file.<a name="line.238"></a>
-<span class="sourceLineNo">239</span>   */<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  @VisibleForTesting<a name="line.240"></a>
-<span class="sourceLineNo">241</span>  boolean splitLogFile(FileStatus logfile, CancelableProgressable reporter) throws IOException {<a name="line.241"></a>
-<span class="sourceLineNo">242</span>    Preconditions.checkState(status == null);<a name="line.242"></a>
-<span class="sourceLineNo">243</span>    Preconditions.checkArgument(logfile.isFile(),<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        "passed in file status is for something other than a regular file.");<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    boolean isCorrupted = false;<a name="line.245"></a>
-<span class="sourceLineNo">246</span>    boolean skipErrors = conf.getBoolean("hbase.hlog.split.skip.errors",<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      SPLIT_SKIP_ERRORS_DEFAULT);<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    int interval = conf.getInt("hbase.splitlog.report.interval.loglines", 1024);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    Path logPath = logfile.getPath();<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    boolean outputSinkStarted = false;<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    boolean progressFailed = false;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    int editsCount = 0;<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    int editsSkipped = 0;<a name="line.253"></a>
-<span class="sourceLineNo">254</span><a name="line.254"></a>
-<span class="sourceLineNo">255</span>    status = TaskMonitor.get().createStatus(<a name="line.255"></a>
-<span class="sourceLineNo">256</span>          "Splitting log file " + logfile.getPath() + "into a temporary staging area.");<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    Reader logFileReader = null;<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    this.fileBeingSplit = logfile;<a name="line.258"></a>
-<span class="sourceLineNo">259</span>    long startTS = EnvironmentEdgeManager.currentTime();<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    try {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      long logLength = logfile.getLen();<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      LOG.info("Splitting WAL={}, size={} ({} bytes)", logPath, StringUtils.humanSize(logLength),<a name="line.262"></a>
-<span class="sourceLineNo">263</span>          logLength);<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      status.setStatus("Opening log file");<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>        progressFailed = true;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        return false;<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      }<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      logFileReader = getReader(logfile, skipErrors, reporter);<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      if (logFileReader == null) {<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        LOG.warn("Nothing to split in WAL={}", logPath);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        return true;<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      }<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      long openCost = EnvironmentEdgeManager.currentTime() - startTS;<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      LOG.info("Open WAL={} cost {} ms", logPath, openCost);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      int numOpenedFilesBeforeReporting = conf.getInt("hbase.splitlog.report.openedfiles", 3);<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      int numOpenedFilesLastCheck = 0;<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      outputSink.setReporter(reporter);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      outputSink.startWriterThreads();<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      outputSinkStarted = true;<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      Entry entry;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      Long lastFlushedSequenceId = -1L;<a name="line.282"></a>
-<span class="sourceLineNo">283</span>      startTS = EnvironmentEdgeManager.currentTime();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>      while ((entry = getNextLogLine(logFileReader, logPath, skipErrors)) != null) {<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        byte[] region = entry.getKey().getEncodedRegionName();<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        String encodedRegionNameAsStr = Bytes.toString(region);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>        lastFlushedSequenceId = lastFlushedSequenceIds.get(encodedRegionNameAsStr);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>        if (lastFlushedSequenceId == null) {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>          if (!(isRegionDirPresentUnderRoot(entry.getKey().getTableName(), encodedRegionNameAsStr))) {<a name="line.289"></a>
-<span class="sourceLineNo">290</span>            // The region directory itself is not present in the FS. This indicates that<a name="line.290"></a>
-<span class="sourceLineNo">291</span>            // the region/table is already removed. We can just skip all the edits for this<a name="line.291"></a>
-<span class="sourceLineNo">292</span>            // region. Setting lastFlushedSequenceId as Long.MAX_VALUE so that all edits<a name="line.292"></a>
-<span class="sourceLineNo">293</span>            // will get skipped by the seqId check below.<a name="line.293"></a>
-<span class="sourceLineNo">294</span>            // See more details at https://issues.apache.org/jira/browse/HBASE-24189<a name="line.294"></a>
-<span class="sourceLineNo">295</span>            LOG.info("{} no longer available in the FS. Skipping all edits for this region.",<a name="line.295"></a>
-<span class="sourceLineNo">296</span>                encodedRegionNameAsStr);<a name="line.296"></a>
-<span class="sourceLineNo">297</span>            lastFlushedSequenceId = Long.MAX_VALUE;<a name="line.297"></a>
-<span class="sourceLineNo">298</span>          } else {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>            if (sequenceIdChecker != null) {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>              RegionStoreSequenceIds ids = sequenceIdChecker.getLastSequenceId(region);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>              Map&lt;byte[], Long&gt; maxSeqIdInStores = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.301"></a>
-<span class="sourceLineNo">302</span>              for (StoreSequenceId storeSeqId : ids.getStoreSequenceIdList()) {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>                maxSeqIdInStores.put(storeSeqId.getFamilyName().toByteArray(),<a name="line.303"></a>
-<span class="sourceLineNo">304</span>                    storeSeqId.getSequenceId());<a name="line.304"></a>
-<span class="sourceLineNo">305</span>              }<a name="line.305"></a>
-<span class="sourceLineNo">306</span>              regionMaxSeqIdInStores.put(encodedRegionNameAsStr, maxSeqIdInStores);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>              lastFlushedSequenceId = ids.getLastFlushedSequenceId();<a name="line.307"></a>
-<span class="sourceLineNo">308</span>              if (LOG.isDebugEnabled()) {<a name="line.308"></a>
-<span class="sourceLineNo">309</span>                LOG.debug("DLS Last flushed sequenceid for " + encodedRegionNameAsStr + ": "<a name="line.309"></a>
-<span class="sourceLineNo">310</span>                    + TextFormat.shortDebugString(ids));<a name="line.310"></a>
-<span class="sourceLineNo">311</span>              }<a name="line.311"></a>
-<span class="sourceLineNo">312</span>            }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>            if (lastFlushedSequenceId == null) {<a name="line.313"></a>
-<span class="sourceLineNo">314</span>              lastFlushedSequenceId = -1L;<a name="line.314"></a>
-<span class="sourceLineNo">315</span>            }<a name="line.315"></a>
-<span class="sourceLineNo">316</span>          }<a name="line.316"></a>
-<span class="sourceLineNo">317</span>          lastFlushedSequenceIds.put(encodedRegionNameAsStr, lastFlushedSequenceId);<a name="line.317"></a>
-<span class="sourceLineNo">318</span>        }<a name="line.318"></a>
-<span class="sourceLineNo">319</span>        if (lastFlushedSequenceId &gt;= entry.getKey().getSequenceId()) {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>          editsSkipped++;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>          continue;<a name="line.321"></a>
-<span class="sourceLineNo">322</span>        }<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        // Don't send Compaction/Close/Open region events to recovered edit type sinks.<a name="line.323"></a>
-<span class="sourceLineNo">324</span>        if (entry.getEdit().isMetaEdit() &amp;&amp; !outputSink.keepRegionEvent(entry)) {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>          editsSkipped++;<a name="line.325"></a>
-<span class="sourceLineNo">326</span>          continue;<a name="line.326"></a>
-<span class="sourceLineNo">327</span>        }<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        entryBuffers.appendEntry(entry);<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        editsCount++;<a name="line.329"></a>
-<span class="sourceLineNo">330</span>        int moreWritersFromLastCheck = this.getNumOpenWriters() - numOpenedFilesLastCheck;<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        // If sufficient edits have passed, check if we should report progress.<a name="line.331"></a>
-<span class="sourceLineNo">332</span>        if (editsCount % interval == 0<a name="line.332"></a>
-<span class="sourceLineNo">333</span>            || moreWritersFromLastCheck &gt; numOpenedFilesBeforeReporting) {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          numOpenedFilesLastCheck = this.getNumOpenWriters();<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          String countsStr = (editsCount - (editsSkipped + outputSink.getTotalSkippedEdits()))<a name="line.335"></a>
-<span class="sourceLineNo">336</span>              + " edits, skipped " + editsSkipped + " edits.";<a name="line.336"></a>
-<span class="sourceLineNo">337</span>          status.setStatus("Split " + countsStr);<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.338"></a>
-<span class="sourceLineNo">339</span>            progressFailed = true;<a name="line.339"></a>
-<span class="sourceLineNo">340</span>            return false;<a name="line.340"></a>
-<span class="sourceLineNo">341</span>          }<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        }<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      }<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    } catch (InterruptedException ie) {<a name="line.344"></a>
-<span class="sourceLineNo">345</span>      IOException iie = new InterruptedIOException();<a name="line.345"></a>
-<span class="sourceLineNo">346</span>      iie.initCause(ie);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      throw iie;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    } catch (CorruptedLogFileException e) {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      LOG.warn("Could not parse, corrupted WAL={}", logPath, e);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>      if (splitLogWorkerCoordination != null) {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>        // Some tests pass in a csm of null.<a name="line.351"></a>
-<span class="sourceLineNo">352</span>        splitLogWorkerCoordination.markCorrupted(walDir, logfile.getPath().getName(), walFS);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      } else {<a name="line.353"></a>
-<span class="sourceLineNo">354</span>        // for tests only<a name="line.354"></a>
-<span class="sourceLineNo">355</span>        ZKSplitLog.markCorrupted(walDir, logfile.getPath().getName(), walFS);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      }<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      isCorrupted = true;<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    } catch (IOException e) {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      e = e instanceof RemoteException ? ((RemoteException) e).unwrapRemoteException() : e;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>      throw e;<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    } finally {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>      LOG.debug("Finishing writing output logs and closing down");<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      try {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>        if (null != logFileReader) {<a name="line.364"></a>
-<span class="sourceLineNo">365</span>          logFileReader.close();<a name="line.365"></a>
-<span class="sourceLineNo">366</span>        }<a name="line.366"></a>
-<span class="sourceLineNo">367</span>      } catch (IOException exception) {<a name="line.367"></a>
-<span class="sourceLineNo">368</span>        LOG.warn("Could not close WAL reader", exception);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
-<span class="sourceLineNo">370</span>      try {<a name="line.370"></a>
-<span class="sourceLineNo">371</span>        if (outputSinkStarted) {<a name="line.371"></a>
-<span class="sourceLineNo">372</span>          // Set progress_failed to true as the immediate following statement will reset its value<a name="line.372"></a>
-<span class="sourceLineNo">373</span>          // when close() throws exception, progress_failed has the right value<a name="line.373"></a>
-<span class="sourceLineNo">374</span>          progressFailed = true;<a name="line.374"></a>
-<span class="sourceLineNo">375</span>          progressFailed = outputSink.close() == null;<a name="line.375"></a>
-<span class="sourceLineNo">376</span>        }<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      } finally {<a name="line.377"></a>
-<span class="sourceLineNo">378</span>        long processCost = EnvironmentEdgeManager.currentTime() - startTS;<a name="line.378"></a>
-<span class="sourceLineNo">379</span>        // See if length got updated post lease recovery<a name="line.379"></a>
-<span class="sourceLineNo">380</span>        String msg = "Processed " + editsCount + " edits across " +<a name="line.380"></a>
-<span class="sourceLineNo">381</span>            outputSink.getNumberOfRecoveredRegions() + " regions cost " + processCost +<a name="line.381"></a>
-<span class="sourceLineNo">382</span>            " ms; edits skipped=" + editsSkipped + "; WAL=" + logPath + ", size=" +<a name="line.382"></a>
-<span class="sourceLineNo">383</span>            StringUtils.humanSize(logfile.getLen()) + ", length=" + logfile.getLen() +<a name="line.383"></a>
-<span class="sourceLineNo">384</span>            ", corrupted=" + isCorrupted + ", progress failed=" + progressFailed;<a name="line.384"></a>
-<span class="sourceLineNo">385</span>        LOG.info(msg);<a name="line.385"></a>
-<span class="sourceLineNo">386</span>        status.markComplete(msg);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      }<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    }<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    return !progressFailed;<a name="line.389"></a>
-<span class="sourceLineNo">390</span>  }<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>  private boolean isRegionDirPresentUnderRoot(TableName tableName, String regionName)<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      throws IOException {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    Path regionDirPath = CommonFSUtils.getRegionDir(this.rootDir, tableName, regionName);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    return this.rootFS.exists(regionDirPath);<a name="line.395"></a>
-<span class="sourceLineNo">396</span>  }<a name="line.396"></a>
-<span class="sourceLineNo">397</span><a name="line.397"></a>
-<span class="sourceLineNo">398</span>  /**<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   * Create a new {@link Reader} for reading logs to split.<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  private Reader getReader(FileStatus file, boolean skipErrors, CancelableProgressable reporter)<a name="line.401"></a>
-<span class="sourceLineNo">402</span>      throws IOException, CorruptedLogFileException {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>    Path path = file.getPath();<a name="line.403"></a>
-<span class="sourceLineNo">404</span>    long length = file.getLen();<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    Reader in;<a name="line.405"></a>
-<span class="sourceLineNo">406</span><a name="line.406"></a>
-<span class="sourceLineNo">407</span>    // Check for possibly empty file. With appends, currently Hadoop reports a<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    // zero length even if the file has been sync'd. Revisit if HDFS-376 or<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    // HDFS-878 is committed.<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    if (length &lt;= 0) {<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      LOG.warn("File {} might be still open, length is 0", path);<a name="line.411"></a>
-<span class="sourceLineNo">412</span>    }<a name="line.412"></a>
+<span class="sourceLineNo">187</span>  WALFactory getWalFactory(){<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    return this.walFactory;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>  }<a name="line.189"></a>
+<span class="sourceLineNo">190</span><a name="line.190"></a>
+<span class="sourceLineNo">191</span>  FileStatus getFileBeingSplit() {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>    return fileBeingSplit;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>  }<a name="line.193"></a>
+<span class="sourceLineNo">194</span><a name="line.194"></a>
+<span class="sourceLineNo">195</span>  String getTmpDirName() {<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    return this.tmpDirName;<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  }<a name="line.197"></a>
+<span class="sourceLineNo">198</span><a name="line.198"></a>
+<span class="sourceLineNo">199</span>  Map&lt;String, Map&lt;byte[], Long&gt;&gt; getRegionMaxSeqIdInStores() {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    return regionMaxSeqIdInStores;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>  }<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>  /**<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   * Splits a WAL file.<a name="line.204"></a>
+<span class="sourceLineNo">205</span>   * @return false if it is interrupted by the progress-able.<a name="line.205"></a>
+<span class="sourceLineNo">206</span>   */<a name="line.206"></a>
+<span class="sourceLineNo">207</span>  public static boolean splitLogFile(Path walDir, FileStatus logfile, FileSystem walFS,<a name="line.207"></a>
+<span class="sourceLineNo">208</span>      Configuration conf, CancelableProgressable reporter, LastSequenceId idChecker,<a name="line.208"></a>
+<span class="sourceLineNo">209</span>      SplitLogWorkerCoordination splitLogWorkerCoordination, WALFactory factory,<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      RegionServerServices rsServices) throws IOException {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    Path rootDir = CommonFSUtils.getRootDir(conf);<a name="line.211"></a>
+<span class="sourceLineNo">212</span>    FileSystem rootFS = rootDir.getFileSystem(conf);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>    WALSplitter s = new WALSplitter(factory, conf, walDir, walFS, rootDir, rootFS, idChecker,<a name="line.213"></a>
+<span class="sourceLineNo">214</span>        splitLogWorkerCoordination, rsServices);<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    return s.splitLogFile(logfile, reporter);<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  }<a name="line.216"></a>
+<span class="sourceLineNo">217</span><a name="line.217"></a>
+<span class="sourceLineNo">218</span>  /**<a name="line.218"></a>
+<span class="sourceLineNo">219</span>   * Split a folder of WAL files. Delete the directory when done.<a name="line.219"></a>
+<span class="sourceLineNo">220</span>   * Used by tools and unit tests. It should be package private.<a name="line.220"></a>
+<span class="sourceLineNo">221</span>   * It is public only because TestWALObserver is in a different package,<a name="line.221"></a>
+<span class="sourceLineNo">222</span>   * which uses this method to do log splitting.<a name="line.222"></a>
+<span class="sourceLineNo">223</span>   * @return List of output files created by the split.<a name="line.223"></a>
+<span class="sourceLineNo">224</span>   */<a name="line.224"></a>
+<span class="sourceLineNo">225</span>  @VisibleForTesting<a name="line.225"></a>
+<span class="sourceLineNo">226</span>  public static List&lt;Path&gt; split(Path walDir, Path logDir, Path oldLogDir, FileSystem walFS,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>      Configuration conf, final WALFactory factory) throws IOException {<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    Path rootDir = CommonFSUtils.getRootDir(conf);<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    FileSystem rootFS = rootDir.getFileSystem(conf);<a name="line.229"></a>
+<span class="sourceLineNo">230</span>    final FileStatus[] logfiles =<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        SplitLogManager.getFileList(conf, Collections.singletonList(logDir), null);<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    List&lt;Path&gt; splits = new ArrayList&lt;&gt;();<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    if (ArrayUtils.isNotEmpty(logfiles)) {<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      for (FileStatus logfile : logfiles) {<a name="line.234"></a>
+<span class="sourceLineNo">235</span>        WALSplitter s =<a name="line.235"></a>
+<span class="sourceLineNo">236</span>            new WALSplitter(factory, conf, walDir, walFS, rootDir, rootFS, null, null, null);<a name="line.236"></a>
+<span class="sourceLineNo">237</span>        if (s.splitLogFile(logfile, null)) {<a name="line.237"></a>
+<span class="sourceLineNo">238</span>          finishSplitLogFile(walDir, oldLogDir, logfile.getPath(), conf);<a name="line.238"></a>
+<span class="sourceLineNo">239</span>          if (s.outputSink.splits != null) {<a name="line.239"></a>
+<span class="sourceLineNo">240</span>            splits.addAll(s.outputSink.splits);<a name="line.240"></a>
+<span class="sourceLineNo">241</span>          }<a name="line.241"></a>
+<span class="sourceLineNo">242</span>        }<a name="line.242"></a>
+<span class="sourceLineNo">243</span>      }<a name="line.243"></a>
+<span class="sourceLineNo">244</span>    }<a name="line.244"></a>
+<span class="sourceLineNo">245</span>    if (!walFS.delete(logDir, true)) {<a name="line.245"></a>
+<span class="sourceLineNo">246</span>      throw new IOException("Unable to delete src dir: " + logDir);<a name="line.246"></a>
+<span class="sourceLineNo">247</span>    }<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    return splits;<a name="line.248"></a>
+<span class="sourceLineNo">249</span>  }<a name="line.249"></a>
+<span class="sourceLineNo">250</span><a name="line.250"></a>
+<span class="sourceLineNo">251</span>  /**<a name="line.251"></a>
+<span class="sourceLineNo">252</span>   * WAL splitting implementation, splits one log file.<a name="line.252"></a>
+<span class="sourceLineNo">253</span>   * @param logfile should be an actual log file.<a name="line.253"></a>
+<span class="sourceLineNo">254</span>   */<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  @VisibleForTesting<a name="line.255"></a>
+<span class="sourceLineNo">256</span>  boolean splitLogFile(FileStatus logfile, CancelableProgressable reporter) throws IOException {<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    Preconditions.checkState(status == null);<a name="line.257"></a>
+<span class="sourceLineNo">258</span>    Preconditions.checkArgument(logfile.isFile(),<a name="line.258"></a>
+<span class="sourceLineNo">259</span>        "passed in file status is for something other than a regular file.");<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    boolean isCorrupted = false;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    boolean skipErrors = conf.getBoolean("hbase.hlog.split.skip.errors",<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      SPLIT_SKIP_ERRORS_DEFAULT);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    int interval = conf.getInt("hbase.splitlog.report.interval.loglines", 1024);<a name="line.263"></a>
+<span class="sourceLineNo">264</span>    Path logPath = logfile.getPath();<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    boolean outputSinkStarted = false;<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    boolean progressFailed = false;<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    int editsCount = 0;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    int editsSkipped = 0;<a name="line.268"></a>
+<span class="sourceLineNo">269</span><a name="line.269"></a>
+<span class="sourceLineNo">270</span>    status = TaskMonitor.get().createStatus(<a name="line.270"></a>
+<span class="sourceLineNo">271</span>          "Splitting log file " + logfile.getPath() + "into a temporary staging area.");<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    Reader logFileReader = null;<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    this.fileBeingSplit = logfile;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    long startTS = EnvironmentEdgeManager.currentTime();<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    try {<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      long logLength = logfile.getLen();<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      LOG.info("Splitting WAL={}, size={} ({} bytes)", logPath, StringUtils.humanSize(logLength),<a name="line.277"></a>
+<span class="sourceLineNo">278</span>          logLength);<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      status.setStatus("Opening log file");<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>        progressFailed = true;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>        return false;<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      }<a name="line.283"></a>
+<span class="sourceLineNo">284</span>      logFileReader = getReader(logfile, skipErrors, reporter);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      if (logFileReader == null) {<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        LOG.warn("Nothing to split in WAL={}", logPath);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>        return true;<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      }<a name="line.288"></a>
+<span class="sourceLineNo">289</span>      long openCost = EnvironmentEdgeManager.currentTime() - startTS;<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      LOG.info("Open WAL={} cost {} ms", logPath, openCost);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      int numOpenedFilesBeforeReporting = conf.getInt("hbase.splitlog.report.openedfiles", 3);<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      int numOpenedFilesLastCheck = 0;<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      outputSink.setReporter(reporter);<a name="line.293"></a>
+<span class="sourceLineNo">294</span>      outputSink.startWriterThreads();<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      outputSinkStarted = true;<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      Entry entry;<a name="line.296"></a>
+<span class="sourceLineNo">297</span>      Long lastFlushedSequenceId = -1L;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>      startTS = EnvironmentEdgeManager.currentTime();<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      while ((entry = getNextLogLine(logFileReader, logPath, skipErrors)) != null) {<a name="line.299"></a>
+<span class="sourceLineNo">300</span>        byte[] region = entry.getKey().getEncodedRegionName();<a name="line.300"></a>
+<span class="sourceLineNo">301</span>        String encodedRegionNameAsStr = Bytes.toString(region);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>        lastFlushedSequenceId = lastFlushedSequenceIds.get(encodedRegionNameAsStr);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        if (lastFlushedSequenceId == null) {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>          if (!(isRegionDirPresentUnderRoot(entry.getKey().getTableName(),<a name="line.304"></a>
+<span class="sourceLineNo">305</span>              encodedRegionNameAsStr))) {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>            // The region directory itself is not present in the FS. This indicates that<a name="line.306"></a>
+<span class="sourceLineNo">307</span>            // the region/table is already removed. We can just skip all the edits for this<a name="line.307"></a>
+<span class="sourceLineNo">308</span>            // region. Setting lastFlushedSequenceId as Long.MAX_VALUE so that all edits<a name="line.308"></a>
+<span class="sourceLineNo">309</span>            // will get skipped by the seqId check below.<a name="line.309"></a>
+<span class="sourceLineNo">310</span>            // See more details at https://issues.apache.org/jira/browse/HBASE-24189<a name="line.310"></a>
+<span class="sourceLineNo">311</span>            LOG.info("{} no longer available in the FS. Skipping all edits for this region.",<a name="line.311"></a>
+<span class="sourceLineNo">312</span>                encodedRegionNameAsStr);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>            lastFlushedSequenceId = Long.MAX_VALUE;<a name="line.313"></a>
+<span class="sourceLineNo">314</span>          } else {<a name="line.314"></a>
+<span class="sourceLineNo">315</span>            if (sequenceIdChecker != null) {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>              RegionStoreSequenceIds ids = sequenceIdChecker.getLastSequenceId(region);<a name="line.316"></a>
+<span class="sourceLineNo">317</span>              Map&lt;byte[], Long&gt; maxSeqIdInStores = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.317"></a>
+<span class="sourceLineNo">318</span>              for (StoreSequenceId storeSeqId : ids.getStoreSequenceIdList()) {<a name="line.318"></a>
+<span class="sourceLineNo">319</span>                maxSeqIdInStores.put(storeSeqId.getFamilyName().toByteArray(),<a name="line.319"></a>
+<span class="sourceLineNo">320</span>                    storeSeqId.getSequenceId());<a name="line.320"></a>
+<span class="sourceLineNo">321</span>              }<a name="line.321"></a>
+<span class="sourceLineNo">322</span>              regionMaxSeqIdInStores.put(encodedRegionNameAsStr, maxSeqIdInStores);<a name="line.322"></a>
+<span class="sourceLineNo">323</span>              lastFlushedSequenceId = ids.getLastFlushedSequenceId();<a name="line.323"></a>
+<span class="sourceLineNo">324</span>              if (LOG.isDebugEnabled()) {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>                LOG.debug("DLS Last flushed sequenceid for " + encodedRegionNameAsStr + ": "<a name="line.325"></a>
+<span class="sourceLineNo">326</span>                    + TextFormat.shortDebugString(ids));<a name="line.326"></a>
+<span class="sourceLineNo">327</span>              }<a name="line.327"></a>
+<span class="sourceLineNo">328</span>            }<a name="line.328"></a>
+<span class="sourceLineNo">329</span>            if (lastFlushedSequenceId == null) {<a name="line.329"></a>
+<span class="sourceLineNo">330</span>              lastFlushedSequenceId = -1L;<a name="line.330"></a>
+<span class="sourceLineNo">331</span>            }<a name="line.331"></a>
+<span class="sourceLineNo">332</span>          }<a name="line.332"></a>
+<span class="sourceLineNo">333</span>          lastFlushedSequenceIds.put(encodedRegionNameAsStr, lastFlushedSequenceId);<a name="line.333"></a>
+<span class="sourceLineNo">334</span>        }<a name="line.334"></a>
+<span class="sourceLineNo">335</span>        if (lastFlushedSequenceId &gt;= entry.getKey().getSequenceId()) {<a name="line.335"></a>
+<span class="sourceLineNo">336</span>          editsSkipped++;<a name="line.336"></a>
+<span class="sourceLineNo">337</span>          continue;<a name="line.337"></a>
+<span class="sourceLineNo">338</span>        }<a name="line.338"></a>
+<span class="sourceLineNo">339</span>        // Don't send Compaction/Close/Open region events to recovered edit type sinks.<a name="line.339"></a>
+<span class="sourceLineNo">340</span>        if (entry.getEdit().isMetaEdit() &amp;&amp; !outputSink.keepRegionEvent(entry)) {<a name="line.340"></a>
+<span class="sourceLineNo">341</span>          editsSkipped++;<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          continue;<a name="line.342"></a>
+<span class="sourceLineNo">343</span>        }<a name="line.343"></a>
+<span class="sourceLineNo">344</span>        entryBuffers.appendEntry(entry);<a name="line.344"></a>
+<span class="sourceLineNo">345</span>        editsCount++;<a name="line.345"></a>
+<span class="sourceLineNo">346</span>        int moreWritersFromLastCheck = this.getNumOpenWriters() - numOpenedFilesLastCheck;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        // If sufficient edits have passed, check if we should report progress.<a name="line.347"></a>
+<span class="sourceLineNo">348</span>        if (editsCount % interval == 0<a name="line.348"></a>
+<span class="sourceLineNo">349</span>            || moreWritersFromLastCheck &gt; numOpenedFilesBeforeReporting) {<a name="line.349"></a>
+<span class="sourceLineNo">350</span>          numOpenedFilesLastCheck = this.getNumOpenWriters();<a name="line.350"></a>
+<span class="sourceLineNo">351</span>          String countsStr = (editsCount - (editsSkipped + outputSink.getTotalSkippedEdits()))<a name="line.351"></a>
+<span class="sourceLineNo">352</span>              + " edits, skipped " + editsSkipped + " edits.";<a name="line.352"></a>
+<span class="sourceLineNo">353</span>          status.setStatus("Split " + countsStr);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>          if (reporter != null &amp;&amp; !reporter.progress()) {<a name="line.354"></a>
+<span class="sourceLineNo">355</span>            progressFailed = true;<a name="line.355"></a>
+<span class="sourceLineNo">356</span>            return false;<a name="line.356"></a>
+<span class="sourceLineNo">357</span>          }<a name="line.357"></a>
+<span class="sourceLineNo">358</span>        }<a name="line.358"></a>
+<span class="sourceLineNo">359</span>      }<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    } catch (InterruptedException ie) {<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      IOException iie = new InterruptedIOException();<a name="line.361"></a>
+<span class="sourceLineNo">362</span>      iie.initCause(ie);<a name="line.362"></a>
+<span class="sourceLineNo">363</span>      throw iie;<a name="line.363"></a>
+<span class="sourceLineNo">364</span>    } catch (CorruptedLogFileException e) {<a name="line.364"></a>
+<span class="sourceLineNo">365</span>      LOG.warn("Could not parse, corrupted WAL={}", logPath, e);<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      if (splitLogWorkerCoordination != null) {<a name="line.366"></a>
+<span class="sourceLineNo">367</span>        // Some tests pass in a csm of null.<a name="line.367"></a>
+<span class="sourceLineNo">368</span>        splitLogWorkerCoordination.markCorrupted(walDir, logfile.getPath().getName(), walFS);<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      } else {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>        // for tests only<a name="line.370"></a>
+<span class="sourceLineNo">371</span>        ZKSplitLog.markCorrupted(walDir, logfile.getPath().getName(), walFS);<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      }<a name="line.372"></a>
+<span class="sourceLineNo">373</span>      isCorrupted = true;<a name="line.373"></a>
+<span class="sourceLineNo">374</span>    } catch (IOException e) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>      e = e instanceof RemoteException ? ((RemoteException) e).unwrapRemoteException() : e;<a name="line.375"></a>
+<span class="sourceLineNo">376</span>      throw e;<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    } finally {<a name="line.377"></a>
+<span class="sourceLineNo">378</span>      LOG.debug("Finishing writing output logs and closing down");<a name="line.378"></a>
+<span class="sourceLineNo">379</span>      try {<a name="line.379"></a>
+<span class="sourceLineNo">380</span>        if (null != logFileReader) {<a name="line.380"></a>
+<span class="sourceLineNo">381</span>          logFileReader.close();<a name="line.381"></a>
+<span class="sourceLineNo">382</span>        }<a name="line.382"></a>
+<span class="sourceLineNo">383</span>      } catch (IOException exception) {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>        LOG.warn("Could not close WAL reader", exception);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      try {<a name="line.386"></a>
+<span class="sourceLineNo">387</span>        if (outputSinkStarted) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>          // Set progress_failed to true as the immediate following statement will reset its value<a name="line.388"></a>
+<span class="sourceLineNo">389</span>          // when close() throws exception, progress_failed has the right value<a name="line.389"></a>
+<span class="sourceLineNo">390</span>          progressFailed = true;<a name="line.390"></a>
+<span class="sourceLineNo">391</span>          progressFailed = outputSink.close() == null;<a name="line.391"></a>
+<span class="sourceLineNo">392</span>        }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      } finally {<a name="line.393"></a>
+<span class="sourceLineNo">394</span>        long processCost = EnvironmentEdgeManager.currentTime() - startTS;<a name="line.394"></a>
+<span class="sourceLineNo">395</span>        // See if length got updated post lease recovery<a name="line.395"></a>
+<span class="sourceLineNo">396</span>        String msg = "Processed " + editsCount + " edits across " +<a name="line.396"></a>
+<span class="sourceLineNo">397</span>            outputSink.getNumberOfRecoveredRegions() + " regions cost " + processCost +<a name="line.397"></a>
+<span class="sourceLineNo">398</span>            " ms; edits skipped=" + editsSkipped + "; WAL=" + logPath + ", size=" +<a name="line.398"></a>
+<span class="sourceLineNo">399</span>            StringUtils.humanSize(logfile.getLen()) + ", length=" + logfile.getLen() +<a name="line.399"></a>
+<span class="sourceLineNo">400</span>            ", corrupted=" + isCorrupted + ", progress failed=" + progressFailed;<a name="line.400"></a>
+<span class="sourceLineNo">401</span>        LOG.info(msg);<a name="line.401"></a>
+<span class="sourceLineNo">402</span>        status.markComplete(msg);<a name="line.402"></a>
+<span class="sourceLineNo">403</span>      }<a name="line.403"></a>
+<span class="sourceLineNo">404</span>    }<a name="line.404"></a>
+<span class="sourceLineNo">405</span>    return !progressFailed;<a name="line.405"></a>
+<span class="sourceLineNo">406</span>  }<a name="line.406"></a>
+<span class="sourceLineNo">407</span><a name="line.407"></a>
+<span class="sourceLineNo">408</span>  private boolean isRegionDirPresentUnderRoot(TableName tableName, String regionName)<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      throws IOException {<a name="line.409"></a>
+<span class="sourceLineNo">410</span>    Path regionDirPath = CommonFSUtils.getRegionDir(this.rootDir, tableName, regionName);<a name="line.410"></a>
+<span class="sourceLineNo">411</span>    return this.rootFS.exists(regionDirPath);<a name="line.411"></a>
+<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
 <span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>    try {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>      RecoverLeaseFSUtils.recoverFileLease(walFS, path, conf, reporter);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>      try {<a name="line.416"></a>
-<span class="sourceLineNo">417</span>        in = getReader(path, reporter);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      } catch (EOFException e) {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>        if (length &lt;= 0) {<a name="line.419"></a>
-<span class="sourceLineNo">420</span>          // TODO should we ignore an empty, not-last log file if skip.errors<a name="line.420"></a>
-<span class="sourceLineNo">421</span>          // is false? Either way, the caller should decide what to do. E.g.<a name="line.421"></a>
-<span class="sourceLineNo">422</span>          // ignore if this is the last log in sequence.<a name="line.422"></a>
-<span class="sourceLineNo">423</span>          // TODO is this scenario still possible if the log has been<a name="line.423"></a>
-<span class="sourceLineNo">424</span>          // recovered (i.e. closed)<a name="line.424"></a>
-<span class="sourceLineNo">425</span>          LOG.warn("Could not open {} for reading. File is empty", path, e);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        }<a name="line.426"></a>
-<span class="sourceLineNo">427</span>        // EOFException being ignored<a name="line.427"></a>
-<span class="sourceLineNo">428</span>        return null;<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      }<a name="line.429"></a>
-<span class="sourceLineNo">430</span>    } catch (IOException e) {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>      if (e instanceof FileNotFoundException) {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        // A wal file may not exist anymore. Nothing can be recovered so move on<a name="line.432"></a>
-<span class="sourceLineNo">433</span>        LOG.warn("File {} does not exist anymore", path, e);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>        return null;<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      }<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      if (!skipErrors || e instanceof InterruptedIOException) {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>        throw e; // Don't mark the file corrupted if interrupted, or not skipErrors<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      }<a name="line.438"></a>
-<span class="sourceLineNo">439</span>      throw new CorruptedLogFileException("skipErrors=true Could not open wal "<a name="line.439"></a>
-<span class="sourceLineNo">440</span>        + path + " ignoring", e);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>    }<a name="line.441"></a>
-<span class="sourceLineNo">442</span>    return in;<a name="line.442"></a>
-<span class="sourceLineNo">443</span>  }<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>  private Entry getNextLogLine(Reader in, Path path, boolean skipErrors)<a name="line.445"></a>
-<span class="sourceLineNo">446</span>      throws CorruptedLogFileException, IOException {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>    try {<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      return in.next();<a name="line.448"></a>
-<span class="sourceLineNo">449</span>    } catch (EOFException eof) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>      // truncated files are expected if a RS crashes (see HBASE-2643)<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      LOG.info("EOF from wal {}. Continuing.", path);<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      return null;<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    } catch (IOException e) {<a name="line.453"></a>
-<span class="sourceLineNo">454</span>      // If the IOE resulted from bad file format,<a name="line.454"></a>
-<span class="sourceLineNo">455</span>      // then this problem is idempotent and retrying won't help<a name="line.455"></a>
-<span class="sourceLineNo">456</span>      if (e.getCause() != null &amp;&amp; (e.getCause() instanceof ParseException<a name="line.456"></a>
-<span class="sourceLineNo">457</span>          || e.getCause() instanceof org.apache.hadoop.fs.ChecksumException)) {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>        LOG.warn("Parse exception from wal {}. Continuing", path, e);<a name="line.458"></a>
-<span class="sourceLineNo">459</span>        return null;<a name="line.459"></a>
-<span class="sourceLineNo">460</span>      }<a name="line.460"></a>
-<span class="sourceLineNo">461</span>      if (!skipErrors) {<a name="line.461"></a>
-<span class="sourceLineNo">462</span>        throw e;<a name="line.462"></a>
-<span class="sourceLineNo">463</span>      }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      throw new CorruptedLogFileException("skipErrors=true Ignoring exception"<a name="line.464"></a>
-<span class="sourceLineNo">465</span>        + " while parsing wal " + path + ". Marking as corrupted", e);<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    }<a name="line.466"></a>
-<span class="sourceLineNo">467</span>  }<a name="line.467"></a>
-<span class="sourceLineNo">468</span><a name="line.468"></a>
-<span class="sourceLineNo">469</span>  /**<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   * Create a new {@link WALProvider.Writer} for writing log splits.<a name="line.470"></a>
-<span class="sourceLineNo">471</span>   * @return a new Writer instance, caller should close<a name="line.471"></a>
-<span class="sourceLineNo">472</span>   */<a name="line.472"></a>
-<span class="sourceLineNo">473</span>  protected WALProvider.Writer createWriter(Path logfile) throws IOException {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    return walFactory.createRecoveredEditsWriter(walFS, logfile);<a name="line.474"></a>
-<span class="sourceLineNo">475</span>  }<a name="line.475"></a>
-<span class="sourceLineNo">476</span><a name="line.476"></a>
-<span class="sourceLineNo">477</span>  /**<a name="line.477"></a>
-<span class="sourceLineNo">478</span>   * Create a new {@link Reader} for reading logs to split.<a name="line.478"></a>
-<span class="sourceLineNo">479</span>   * @return new Reader instance, caller should close<a name="line.479"></a>
-<span class="sourceLineNo">480</span>   */<a name="line.480"></a>
-<span class="sourceLineNo">481</span>  protected Reader getReader(Path curLogFile, CancelableProgressable reporter) throws IOException {<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    return walFactory.createReader(walFS, curLogFile, reporter);<a name="line.482"></a>
+<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   * Create a new {@link Reader} for reading logs to split.<a name="line.415"></a>
+<span class="sourceLineNo">416</span>   */<a name="line.416"></a>
+<span class="sourceLineNo">417</span>  private Reader getReader(FileStatus file, boolean skipErrors, CancelableProgressable reporter)<a name="line.417"></a>
+<span class="sourceLineNo">418</span>      throws IOException, CorruptedLogFileException {<a name="line.418"></a>
+<span class="sourceLineNo">419</span>    Path path = file.getPath();<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    long length = file.getLen();<a name="line.420"></a>
+<span class="sourceLineNo">421</span>    Reader in;<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>    // Check for possibly empty file. With appends, currently Hadoop reports a<a name="line.423"></a>
+<span class="sourceLineNo">424</span>    // zero length even if the file has been sync'd. Revisit if HDFS-376 or<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    // HDFS-878 is committed.<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    if (length &lt;= 0) {<a name="line.426"></a>
+<span class="sourceLineNo">427</span>      LOG.warn("File {} might be still open, length is 0", path);<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    }<a name="line.428"></a>
+<span class="sourceLineNo">429</span><a name="line.429"></a>
+<span class="sourceLineNo">430</span>    try {<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      RecoverLeaseFSUtils.recoverFileLease(walFS, path, conf, reporter);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>      try {<a name="line.432"></a>
+<span class="sourceLineNo">433</span>        in = getReader(path, reporter);<a name="line.433"></a>
+<span class="sourceLineNo">434</span>      } catch (EOFException e) {<a name="line.434"></a>
+<span class="sourceLineNo">435</span>        if (length &lt;= 0) {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>          // TODO should we ignore an empty, not-last log file if skip.errors<a name="line.436"></a>
+<span class="sourceLineNo">437</span>          // is false? Either way, the caller should decide what to do. E.g.<a name="line.437"></a>
+<span class="sourceLineNo">438</span>          // ignore if this is the last log in sequence.<a name="line.438"></a>
+<span class="sourceLineNo">439</span>          // TODO is this scenario still possible if the log has been<a name="line.439"></a>
+<span class="sourceLineNo">440</span>          // recovered (i.e. closed)<a name="line.440"></a>
+<span class="sourceLineNo">441</span>          LOG.warn("Could not open {} for reading. File is empty", path, e);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        }<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        // EOFException being ignored<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        return null;<a name="line.444"></a>
+<span class="sourceLineNo">445</span>      }<a name="line.445"></a>
+<span class="sourceLineNo">446</span>    } catch (IOException e) {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      if (e instanceof FileNotFoundException) {<a name="line.447"></a>
+<span class="sourceLineNo">448</span>        // A wal file may not exist anymore. Nothing can be recovered so move on<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        LOG.warn("File {} does not exist anymore", path, e);<a name="line.449"></a>
+<span class="sourceLineNo">450</span>        return null;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      }<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      if (!skipErrors || e instanceof InterruptedIOException) {<a name="line.452"></a>
+<span class="sourceLineNo">453</span>        throw e; // Don't mark the file corrupted if interrupted, or not skipErrors<a name="line.453"></a>
+<span class="sourceLineNo">454</span>      }<a name="line.454"></a>
+<span class="sourceLineNo">455</span>      throw new CorruptedLogFileException("skipErrors=true Could not open wal "<a name="line.455"></a>
+<span class="sourceLineNo">456</span>        + path + " ignoring", e);<a name="line.456"></a>
+<span class="sourceLineNo">457</span>    }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    return in;<a name="line.458"></a>
+<span class="sourceLineNo">459</span>  }<a name="line.459"></a>
+<span class="sourceLineNo">460</span><a name="line.460"></a>
+<span class="sourceLineNo">461</span>  private Entry getNextLogLine(Reader in, Path path, boolean skipErrors)<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      throws CorruptedLogFileException, IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span>    try {<a name="line.463"></a>
+<span class="sourceLineNo">464</span>      return in.next();<a name="line.464"></a>
+<span class="sourceLineNo">465</span>    } catch (EOFException eof) {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>      // truncated files are expected if a RS crashes (see HBASE-2643)<a name="line.466"></a>
+<span class="sourceLineNo">467</span>      LOG.info("EOF from wal {}. Continuing.", path);<a name="line.467"></a>
+<span class="sourceLineNo">468</span>      return null;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>    } catch (IOException e) {<a name="line.469"></a>
+<span class="sourceLineNo">470</span>      // If the IOE resulted from bad file format,<a name="line.470"></a>
+<span class="sourceLineNo">471</span>      // then this problem is idempotent and retrying won't help<a name="line.471"></a>
+<span class="sourceLineNo">472</span>      if (e.getCause() != null &amp;&amp; (e.getCause() instanceof ParseException<a name="line.472"></a>
+<span class="sourceLineNo">473</span>          || e.getCause() instanceof org.apache.hadoop.fs.ChecksumException)) {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>        LOG.warn("Parse exception from wal {}. Continuing", path, e);<a name="line.474"></a>
+<span class="sourceLineNo">475</span>        return null;<a name="line.475"></a>
+<span class="sourceLineNo">476</span>      }<a name="line.476"></a>
+<span class="sourceLineNo">477</span>      if (!skipErrors) {<a name="line.477"></a>
+<span class="sourceLineNo">478</span>        throw e;<a name="line.478"></a>
+<span class="sourceLineNo">479</span>      }<a name="line.479"></a>
+<span class="sourceLineNo">480</span>      throw new CorruptedLogFileException("skipErrors=true Ignoring exception"<a name="line.480"></a>
+<span class="sourceLineNo">481</span>        + " while parsing wal " + path + ". Marking as corrupted", e);<a name="line.481"></a>
+<span class="sourceLineNo">482</span>    }<a name="line.482"></a>
 <span class="sourceLineNo">483</span>  }<a name="line.483"></a>
 <span class="sourceLineNo">484</span><a name="line.484"></a>
 <span class="sourceLineNo">485</span>  /**<a name="line.485"></a>
-<span class="sourceLineNo">486</span>   * Get current open writers<a name="line.486"></a>
-<span class="sourceLineNo">487</span>   */<a name="line.487"></a>
-<span class="sourceLineNo">488</span>  private int getNumOpenWriters() {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    int result = 0;<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    if (this.outputSink != null) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      result += this.outputSink.getNumOpenWriters();<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    }<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    return result;<a name="line.493"></a>
-<span class="sourceLineNo">494</span>  }<a name="line.494"></a>
-<span class="sourceLineNo">495</span><a name="line.495"></a>
-<span class="sourceLineNo">496</span>  /**<a name="line.496"></a>
-<span class="sourceLineNo">497</span>   * Contains some methods to control WAL-entries producer / consumer interactions<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   */<a name="line.498"></a>
-<span class="sourceLineNo">499</span>  public static class PipelineController {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>    // If an exception is thrown by one of the other threads, it will be<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    // stored here.<a name="line.501"></a>
-<span class="sourceLineNo">502</span>    AtomicReference&lt;Throwable&gt; thrown = new AtomicReference&lt;&gt;();<a name="line.502"></a>
-<span class="sourceLineNo">503</span><a name="line.503"></a>
-<span class="sourceLineNo">504</span>    // Wait/notify for when data has been produced by the writer thread,<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    // consumed by the reader thread, or an exception occurred<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    final Object dataAvailable = new Object();<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>    void writerThreadError(Throwable t) {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>      thrown.compareAndSet(null, t);<a name="line.509"></a>
-<span class="sourceLineNo">510</span>    }<a name="line.510"></a>
+<span class="sourceLineNo">486</span>   * Create a new {@link WALProvider.Writer} for writing log splits.<a name="line.486"></a>
+<span class="sourceLineNo">487</span>   * @return a new Writer instance, caller should close<a name="line.487"></a>
+<span class="sourceLineNo">488</span>   */<a name="line.488"></a>
+<span class="sourceLineNo">489</span>  protected WALProvider.Writer createWriter(Path logfile) throws IOException {<a name="line.489"></a>
+<span class="sourceLineNo">490</span>    return walFactory.createRecoveredEditsWriter(walFS, logfile);<a name="line.490"></a>
+<span class="sourceLineNo">491</span>  }<a name="line.491"></a>
+<span class="sourceLineNo">492</span><a name="line.492"></a>
+<span class="sourceLineNo">493</span>  /**<a name="line.493"></a>
+<span class="sourceLineNo">494</span>   * Create a new {@link Reader} for reading logs to split.<a name="line.494"></a>
+<span class="sourceLineNo">495</span>   * @return new Reader instance, caller should close<a name="line.495"></a>
+<span class="sourceLineNo">496</span>   */<a name="line.496"></a>
+<span class="sourceLineNo">497</span>  protected Reader getReader(Path curLogFile, CancelableProgressable reporter) throws IOException {<a name="line.497"></a>
+<span class="sourceLineNo">498</span>    return walFactory.createReader(walFS, curLogFile, reporter);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>  }<a name="line.499"></a>
+<span class="sourceLineNo">500</span><a name="line.500"></a>
+<span class="sourceLineNo">501</span>  /**<a name="line.501"></a>
+<span class="sourceLineNo">502</span>   * Get current open writers<a name="line.502"></a>
+<span class="sourceLineNo">503</span>   */<a name="line.503"></a>
+<span class="sourceLineNo">504</span>  private int getNumOpenWriters() {<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    int result = 0;<a name="line.505"></a>
+<span class="sourceLineNo">506</span>    if (this.outputSink != null) {<a name="line.506"></a>
+<span class="sourceLineNo">507</span>      result += this.outputSink.getNumOpenWriters();<a name="line.507"></a>
+<span class="sourceLineNo">508</span>    }<a name="line.508"></a>
+<span class="sourceLineNo">509</span>    return result;<a name="line.509"></a>
+<span class="sourceLineNo">510</span>  }<a name="line.510"></a>
 <span class="sourceLineNo">511</span><a name="line.511"></a>
-<span class="sourceLineNo">512</span>    /**<a name="line.512"></a>
-<span class="sourceLineNo">513</span>     * Check for errors in the writer threads. If any is found, rethrow it.<a name="line.513"></a>
-<span class="sourceLineNo">514</span>     */<a name="line.514"></a>
-<span class="sourceLineNo">515</span>    void checkForErrors() throws IOException {<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      Throwable thrown = this.thrown.get();<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      if (thrown == null) {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>        return;<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>      if (thrown instanceof IOException) {<a name="line.520"></a>
-<span class="sourceLineNo">521</span>        throw new IOException(thrown);<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      } else {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>        throw new RuntimeException(thrown);<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      }<a name="line.524"></a>
-<span class="sourceLineNo">525</span>    }<a name="line.525"></a>
-<span class="sourceLineNo">526</span>  }<a name="line.526"></a>
+<span class="sourceLineNo">512</span>  /**<a name="line.512"></a>
+<span class="sourceLineNo">513</span>   * Contains some methods to control WAL-entries producer / consumer interactions<a name="line.513"></a>
+<span class="sourceLineNo">514</span>   */<a name="line.514"></a>
+<span class="sourceLineNo">515</span>  public static class PipelineController {<a name="line.515"></a>
+<span class="sourceLineNo">516</span>    // If an exception is thrown by one of the other threads, it will be<a name="line.516"></a>
+<span class="sourceLineNo">517</span>    // stored here.<a name="line.517"></a>
+<span class="sourceLineNo">518</span>    AtomicReference&lt;Throwable&gt; thrown = new AtomicReference&lt;&gt;();<a name="line.518"></a>
+<span class="sourceLineNo">519</span><a name="line.519"></a>
+<span class="sourceLineNo">520</span>    // Wait/notify for when data has been produced by the writer thread,<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    // consumed by the reader thread, or an exception occurred<a name="line.521"></a>
+<span class="sourceLineNo">522</span>    final Object dataAvailable = new Object();<a name="line.522"></a>
+<span class="sourceLineNo">523</span><a name="line.523"></a>
+<span class="sourceLineNo">524</span>    void writerThreadError(Throwable t) {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>      thrown.compareAndSet(null, t);<a name="line.525"></a>
+<span class="sourceLineNo">526</span>    }<a name="line.526"></a>
 <span class="sourceLineNo">527</span><a name="line.527"></a>
-<span class="sourceLineNo">528</span>  static class CorruptedLogFileException extends Exception {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    private static final long serialVersionUID = 1L;<a name="line.529"></a>
-<span class="sourceLineNo">530</span><a name="line.530"></a>
-<span class="sourceLineNo">531</span>    CorruptedLogFileException(String s) {<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      super(s);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    }<a name="line.533"></a>
-<span class="sourceLineNo">534</span><a name="line.534"></a>
-<span class="sourceLineNo">535</span>    /**<a name="line.535"></a>
-<span class="sourceLineNo">536</span>     * CorruptedLogFileException with cause<a name="line.536"></a>
-<span class="sourceLineNo">537</span>     *<a name="line.537"></a>
-<span class="sourceLineNo">538</span>     * @param message the message for this exception<a name="line.538"></a>
-<span class="sourceLineNo">539</span>     * @param cause the cause for this exception<a name="line.539"></a>
-<span class="sourceLineNo">540</span>     */<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    CorruptedLogFileException(String message, Throwable cause) {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      super(message, cause);<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    }<a name="line.543"></a>
-<span class="sourceLineNo">544</span>  }<a name="line.544"></a>
-<span class="sourceLineNo">545</span>}<a name="line.545"></a>
+<span class="sourceLineNo">528</span>    /**<a name="line.528"></a>
+<span class="sourceLineNo">529</span>     * Check for errors in the writer threads. If any is found, rethrow it.<a name="line.529"></a>
+<span class="sourceLineNo">530</span>     */<a name="line.530"></a>
+<span class="sourceLineNo">531</span>    void checkForErrors() throws IOException {<a name="line.531"></a>
+<span class="sourceLineNo">532</span>      Throwable thrown = this.thrown.get();<a name="line.532"></a>
+<span class="sourceLineNo">533</span>      if (thrown == null) {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>        return;<a name="line.534"></a>
+<span class="sourceLineNo">535</span>      }<a name="line.535"></a>
+<span class="sourceLineNo">536</span>      if (thrown instanceof IOException) {<a name="line.536"></a>
+<span class="sourceLineNo">537</span>        throw new IOException(thrown);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>      } else {<a name="line.538"></a>
+<span class="sourceLineNo">539</span>        throw new RuntimeException(thrown);<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      }<a name="line.540"></a>
+<span class="sourceLineNo">541</span>    }<a name="line.541"></a>
+<span class="sourceLineNo">542</span>  }<a name="line.542"></a>
+<span class="sourceLineNo">543</span><a name="line.543"></a>
+<span class="sourceLineNo">544</span>  static class CorruptedLogFileException extends Exception {<a name="line.544"></a>
+<span class="sourceLineNo">545</span>    private static final long serialVersionUID = 1L;<a name="line.545"></a>
+<span class="sourceLineNo">546</span><a name="line.546"></a>
+<span class="sourceLineNo">547</span>    CorruptedLogFileException(String s) {<a name="line.547"></a>
+<span class="sourceLineNo">548</span>      super(s);<a name="line.548"></a>
+<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
+<span class="sourceLineNo">550</span><a name="line.550"></a>
+<span class="sourceLineNo">551</span>    /**<a name="line.551"></a>
+<span class="sourceLineNo">552</span>     * CorruptedLogFileException with cause<a name="line.552"></a>
+<span class="sourceLineNo">553</span>     *<a name="line.553"></a>
+<span class="sourceLineNo">554</span>     * @param message the message for this exception<a name="line.554"></a>
+<span class="sourceLineNo">555</span>     * @param cause the cause for this exception<a name="line.555"></a>
+<span class="sourceLineNo">556</span>     */<a name="line.556"></a>
+<span class="sourceLineNo">557</span>    CorruptedLogFileException(String message, Throwable cause) {<a name="line.557"></a>
+<span class="sourceLineNo">558</span>      super(message, cause);<a name="line.558"></a>
+<span class="sourceLineNo">559</span>    }<a name="line.559"></a>
+<span class="sourceLineNo">560</span>  }<a name="line.560"></a>
+<span class="sourceLineNo">561</span>}<a name="line.561"></a>
 
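The getReader/getNextLogLine pair above also illustrates the skipErrors policy: an EOF on a truncated tail is treated as end of log, and other read failures either propagate or are wrapped in CorruptedLogFileException so the worker can mark the file corrupted and continue the split. Below is a standalone sketch of that policy under simplified assumptions; WalReader and CorruptedLogException are stand-ins, and the real method additionally treats ParseException/ChecksumException causes as benign and returns null.

import java.io.EOFException;
import java.io.IOException;

// Standalone sketch of the skipErrors policy shown in getNextLogLine above.
// WalReader and CorruptedLogException are simplified stand-ins, not HBase types.
public class SkipErrorsSketch {

  static class CorruptedLogException extends Exception {
    CorruptedLogException(String message, Throwable cause) {
      super(message, cause);
    }
  }

  interface WalReader {
    String next() throws IOException;   // returns null at end of log
  }

  static String nextLine(WalReader in, String path, boolean skipErrors)
      throws IOException, CorruptedLogException {
    try {
      return in.next();
    } catch (EOFException eof) {
      // A truncated tail is expected after a crash; treat it as end of log.
      return null;
    } catch (IOException e) {
      if (!skipErrors) {
        throw e;                        // surface the failure to the caller
      }
      // With skipErrors on, flag the file as corrupted instead of failing the split.
      throw new CorruptedLogException("Ignoring parse failure on " + path, e);
    }
  }

  public static void main(String[] args) throws Exception {
    WalReader broken = () -> { throw new IOException("bad record"); };
    try {
      nextLine(broken, "wal-000", true);
    } catch (CorruptedLogException e) {
      System.out.println("marked corrupted: " + e.getMessage());
    }
  }
}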
 
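PipelineController's error hand-off is a small "first error wins" pattern: writer threads record at most one Throwable via compareAndSet, and the coordinating thread rethrows it from checkForErrors. A self-contained sketch under those assumptions follows; the names mirror the idea only and this is not the HBase class.

import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;

// Sketch of the "first error wins" hand-off used by PipelineController above;
// names mirror the idea only, this is not the HBase class.
public class FirstErrorSketch {

  private final AtomicReference<Throwable> thrown = new AtomicReference<>();

  void writerThreadError(Throwable t) {
    // Only the first failure is recorded; later ones are ignored.
    thrown.compareAndSet(null, t);
  }

  void checkForErrors() throws IOException {
    Throwable t = thrown.get();
    if (t == null) {
      return;
    }
    // Re-wrap so the caller always sees either IOException or RuntimeException.
    if (t instanceof IOException) {
      throw new IOException(t);
    }
    throw new RuntimeException(t);
  }

  public static void main(String[] args) {
    FirstErrorSketch controller = new FirstErrorSketch();
    controller.writerThreadError(new IOException("disk full"));
    controller.writerThreadError(new IllegalStateException("not first, ignored"));
    try {
      controller.checkForErrors();
    } catch (Exception e) {
      System.out.println("propagated: " + e.getCause());
    }
  }
}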
 
diff --git a/downloads.html b/downloads.html
index c1c6d9a..324a7ba 100644
--- a/downloads.html
+++ b/downloads.html
@@ -434,7 +434,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/export_control.html b/export_control.html
index 5e51a9f..d62e8bf 100644
--- a/export_control.html
+++ b/export_control.html
@@ -197,7 +197,7 @@ for more details.</p>
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/index.html b/index.html
index b0e5258..c72d3a9 100644
--- a/index.html
+++ b/index.html
@@ -275,7 +275,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/issue-tracking.html b/issue-tracking.html
index dc50f20..f580405 100644
--- a/issue-tracking.html
+++ b/issue-tracking.html
@@ -169,7 +169,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/mail-lists.html b/mail-lists.html
index 4e0f591..49703cb 100644
--- a/mail-lists.html
+++ b/mail-lists.html
@@ -229,7 +229,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/metrics.html b/metrics.html
index 4d08be4..4bee34f 100644
--- a/metrics.html
+++ b/metrics.html
@@ -325,7 +325,7 @@ export HBASE_REGIONSERVER_OPTS=&quot;$HBASE_JMX_OPTS -Dcom.sun.management.jmxrem
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/old_news.html b/old_news.html
index 5811c54..ab62178 100644
--- a/old_news.html
+++ b/old_news.html
@@ -316,7 +316,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/plugin-management.html b/plugin-management.html
index 4981a95..466d97d 100644
--- a/plugin-management.html
+++ b/plugin-management.html
@@ -321,7 +321,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/plugins.html b/plugins.html
index 36ce994..073f9f6 100644
--- a/plugins.html
+++ b/plugins.html
@@ -248,7 +248,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/poweredbyhbase.html b/poweredbyhbase.html
index 582cf3b..fed0d6b 100644
--- a/poweredbyhbase.html
+++ b/poweredbyhbase.html
@@ -650,7 +650,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/project-info.html b/project-info.html
index f968e5f..53d2db2 100644
--- a/project-info.html
+++ b/project-info.html
@@ -210,7 +210,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/project-reports.html b/project-reports.html
index 47310d1..a418b49 100644
--- a/project-reports.html
+++ b/project-reports.html
@@ -186,7 +186,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/project-summary.html b/project-summary.html
index a2d6f2c..48e4582 100644
--- a/project-summary.html
+++ b/project-summary.html
@@ -212,7 +212,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/pseudo-distributed.html b/pseudo-distributed.html
index b11457d..4923c20 100644
--- a/pseudo-distributed.html
+++ b/pseudo-distributed.html
@@ -174,7 +174,7 @@ Running Apache HBase (TM) in pseudo-distributed mode
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/replication.html b/replication.html
index 41a2ca5..07f18c3 100644
--- a/replication.html
+++ b/replication.html
@@ -169,7 +169,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/resources.html b/resources.html
index a5af8a6..a2f923a 100644
--- a/resources.html
+++ b/resources.html
@@ -197,7 +197,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/source-repository.html b/source-repository.html
index 4c60a42..ee7a138 100644
--- a/source-repository.html
+++ b/source-repository.html
@@ -180,7 +180,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/sponsors.html b/sponsors.html
index 67a6ac4..284c747 100644
--- a/sponsors.html
+++ b/sponsors.html
@@ -199,7 +199,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/supportingprojects.html b/supportingprojects.html
index 99d5f1c..784fff3 100644
--- a/supportingprojects.html
+++ b/supportingprojects.html
@@ -390,7 +390,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/team-list.html b/team-list.html
index 9e87f69..43b4499 100644
--- a/team-list.html
+++ b/team-list.html
@@ -701,7 +701,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/testdevapidocs/index-all.html b/testdevapidocs/index-all.html
index c74a8ea..ee7285d 100644
--- a/testdevapidocs/index-all.html
+++ b/testdevapidocs/index-all.html
@@ -57403,6 +57403,8 @@
 <dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/regionserver/TestFailedAppendAndSync.DodgyFSLog.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestFailedAppendAndSync.DodgyFSLog</span></a> - Class in <a href="org/apache/hadoop/hbase/regionserver/package-summary.html">org.apache.hadoop.hbase.regionserver</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#testFailedEvaluateResponse--">testFailedEvaluateResponse()</a></span> - Method in class org.apache.hadoop.hbase.security.<a href="org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html" title="class in org.apache.hadoop.hbase.security">TestHBaseSaslRpcClient</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/rsgroup/TestRSGroupsAdmin2.html#testFailedMoveBeforeRetryExhaustedWhenMoveServer--">testFailedMoveBeforeRetryExhaustedWhenMoveServer()</a></span> - Method in class org.apache.hadoop.hbase.rsgroup.<a href="org/apache/hadoop/hbase/rsgroup/TestRSGroupsAdmin2.html" title="class in org.apache.hadoop.hbase.rsgroup">TestRSGroupsAdmin2</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/rsgroup/TestRSGroupsAdmin2.html#testFailedMoveServersAndRepair--">testFailedMoveServersAndRepair()</a></span> - Method in class org.apache.hadoop.hbase.rsgroup.<a href="org/apache/hadoop/hbase/rsgroup/TestRSGroupsAdmin2.html" title="class in org.apache.hadoop.hbase.rsgroup">TestRSGroupsAdmin2</a></dt>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 88e37d3..fa6650a 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -150,8 +150,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/TestBackupDeleteWithFailures.Failure.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">TestBackupDeleteWithFailures.Failure</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/TestIncrementalBackupMergeWithFailures.FailurePhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">TestIncrementalBackupMergeWithFailures.FailurePhase</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/TestBackupDeleteWithFailures.Failure.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">TestBackupDeleteWithFailures.Failure</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index f32920e..d4d7436 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -165,8 +165,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/TagUsage.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">TagUsage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.CacheOnWriteType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">TestCacheOnWrite.CacheOnWriteType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/TagUsage.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">TagUsage</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
index 74c2e8f..e068180 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -653,18 +653,18 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/RESTApiClusterManager.RoleState.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">RESTApiClusterManager.RoleState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HBaseClusterManager.CommandProvider.Operation</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/RESTApiClusterManager.RoleCommand.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">RESTApiClusterManager.RoleCommand</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ResourceChecker.Phase.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ResourceChecker.Phase</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/RESTApiClusterManager.Service.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">RESTApiClusterManager.Service</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/RESTApiClusterManager.HealthSummary.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">RESTApiClusterManager.HealthSummary</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterManager.ServiceType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/RESTApiClusterManager.RoleCommand.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">RESTApiClusterManager.RoleCommand</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Signal.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HBaseClusterManager.Signal</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/PerformanceEvaluation.Counter.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">PerformanceEvaluation.Counter</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ScanPerformanceEvaluation.ScanCounter.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ScanPerformanceEvaluation.ScanCounter</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.Signal.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HBaseClusterManager.Signal</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.Stat.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">IntegrationTestRegionReplicaPerf.Stat</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/RESTApiClusterManager.RoleState.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">RESTApiClusterManager.RoleState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.ACTION.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">IntegrationTestDDLMasterFailover.ACTION</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/RESTApiClusterManager.Service.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">RESTApiClusterManager.Service</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ResourceChecker.Phase.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ResourceChecker.Phase</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterManager.ServiceType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index 01858e6..16561bb 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -241,10 +241,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/TestProcedureBypass.StuckStateMachineState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">TestProcedureBypass.StuckStateMachineState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/TestStateMachineProcedure.TestSMProcedureState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">TestStateMachineProcedure.TestSMProcedureState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.TestStateMachineProcedure.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">TestProcedureRecovery.TestStateMachineProcedure.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/TestYieldProcedures.TestStateMachineProcedure.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">TestYieldProcedures.TestStateMachineProcedure.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.TestStateMachineProcedure.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">TestProcedureRecovery.TestStateMachineProcedure.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/TestProcedureBypass.StuckStateMachineState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">TestProcedureBypass.StuckStateMachineState</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index f47ffd0..34764d0 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -705,11 +705,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.Manipulation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DataBlockEncodingTool.Manipulation</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestAtomicOperation.TestStep.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestAtomicOperation.TestStep</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.CacheOnWriteType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestCacheOnWriteInSchema.CacheOnWriteType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.Manipulation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DataBlockEncodingTool.Manipulation</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.Metric.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestRegionServerReadRequestMetrics.Metric</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.ActionType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestMultiLogThreshold.ActionType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.CacheOnWriteType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestCacheOnWriteInSchema.CacheOnWriteType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html b/testdevapidocs/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html
index 8fcce27..22910e3 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":9,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":9,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 <li class="blockList">
 <hr>
 <br>
-<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.70">TestHBaseSaslRpcClient</a>
+<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.72">TestHBaseSaslRpcClient</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -251,10 +251,14 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </tr>
 <tr id="i15" class="rowColor">
 <td class="colFirst"><code>void</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#testHBaseSaslRpcClientCreation--">testHBaseSaslRpcClientCreation</a></span>()</code>&nbsp;</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#testFailedEvaluateResponse--">testFailedEvaluateResponse</a></span>()</code>&nbsp;</td>
 </tr>
 <tr id="i16" class="altColor">
 <td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#testHBaseSaslRpcClientCreation--">testHBaseSaslRpcClientCreation</a></span>()</code>&nbsp;</td>
+</tr>
+<tr id="i17" class="rowColor">
+<td class="colFirst"><code>void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#testSaslClientUsesGivenRpcProtection--">testSaslClientUsesGivenRpcProtection</a></span>()</code>&nbsp;</td>
 </tr>
 </table>
@@ -285,7 +289,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CLASS_RULE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.73">CLASS_RULE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.75">CLASS_RULE</a></pre>
 </li>
 </ul>
 <a name="DEFAULT_USER_NAME">
@@ -294,7 +298,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_USER_NAME</h4>
-<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.81">DEFAULT_USER_NAME</a></pre>
+<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.83">DEFAULT_USER_NAME</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.security.TestHBaseSaslRpcClient.DEFAULT_USER_NAME">Constant Field Values</a></dd>
@@ -307,7 +311,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_USER_PASSWORD</h4>
-<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.82">DEFAULT_USER_PASSWORD</a></pre>
+<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.84">DEFAULT_USER_PASSWORD</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.security.TestHBaseSaslRpcClient.DEFAULT_USER_PASSWORD">Constant Field Values</a></dd>
@@ -320,7 +324,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.apache.log4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.84">LOG</a></pre>
+<pre>private static final&nbsp;org.apache.log4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.86">LOG</a></pre>
 </li>
 </ul>
 <a name="exception">
@@ -329,7 +333,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>exception</h4>
-<pre>public&nbsp;org.junit.rules.ExpectedException <a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.88">exception</a></pre>
+<pre>public&nbsp;org.junit.rules.ExpectedException <a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.90">exception</a></pre>
 </li>
 </ul>
 </li>
@@ -346,7 +350,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TestHBaseSaslRpcClient</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.70">TestHBaseSaslRpcClient</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.72">TestHBaseSaslRpcClient</a>()</pre>
 </li>
 </ul>
 </li>
@@ -363,7 +367,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>before</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.91">before</a>()</pre>
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.93">before</a>()</pre>
 </li>
 </ul>
 <a name="testSaslClientUsesGivenRpcProtection--">
@@ -372,7 +376,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testSaslClientUsesGivenRpcProtection</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.96">testSaslClientUsesGivenRpcProtection</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.98">testSaslClientUsesGivenRpcProtection</a>()
                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -386,7 +390,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testDigestSaslClientCallbackHandler</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.113">testDigestSaslClientCallbackHandler</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.115">testDigestSaslClientCallbackHandler</a>()
                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/callback/UnsupportedCallbackException.html?is-external=true" title="class or interface in javax.security.auth.callback">UnsupportedCallbackException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -400,7 +404,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testDigestSaslClientCallbackHandlerWithException</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.132">testDigestSaslClientCallbackHandlerWithException</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.134">testDigestSaslClientCallbackHandlerWithException</a>()</pre>
 </li>
 </ul>
 <a name="testHBaseSaslRpcClientCreation--">
@@ -409,7 +413,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testHBaseSaslRpcClientCreation</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.148">testHBaseSaslRpcClientCreation</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.150">testHBaseSaslRpcClientCreation</a>()
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -423,7 +427,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testAuthMethodReadWrite</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.178">testAuthMethodReadWrite</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.180">testAuthMethodReadWrite</a>()
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -437,7 +441,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>assertAuthMethodRead</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.191">assertAuthMethodRead</a>(org.apache.hadoop.io.DataInputBuffer&nbsp;in,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.193">assertAuthMethodRead</a>(org.apache.hadoop.io.DataInputBuffer&nbsp;in,
                                   org.apache.hadoop.hbase.security.AuthMethod&nbsp;authMethod)
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -452,7 +456,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>assertAuthMethodWrite</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.197">assertAuthMethodWrite</a>(org.apache.hadoop.io.DataOutputBuffer&nbsp;out,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.199">assertAuthMethodWrite</a>(org.apache.hadoop.io.DataOutputBuffer&nbsp;out,
                                    org.apache.hadoop.hbase.security.AuthMethod&nbsp;authMethod)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -467,7 +471,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>assertIOExceptionWhenGetStreamsBeforeConnectCall</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.204">assertIOExceptionWhenGetStreamsBeforeConnectCall</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.206">assertIOExceptionWhenGetStreamsBeforeConnectCall</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal,
                                                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;password)
                                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -482,7 +486,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>assertIOExceptionThenSaslClientIsNull</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.238">assertIOExceptionThenSaslClientIsNull</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.240">assertIOExceptionThenSaslClientIsNull</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal,
                                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;password)</pre>
 </li>
 </ul>
@@ -492,7 +496,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>assertSuccessCreationKerberosPrincipal</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.259">assertSuccessCreationKerberosPrincipal</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal)</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.261">assertSuccessCreationKerberosPrincipal</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal)</pre>
 </li>
 </ul>
 <a name="assertSuccessCreationDigestPrincipal-java.lang.String-java.lang.String-">
@@ -501,7 +505,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>assertSuccessCreationDigestPrincipal</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.269">assertSuccessCreationDigestPrincipal</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.271">assertSuccessCreationDigestPrincipal</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal,
                                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;password)</pre>
 </li>
 </ul>
@@ -511,7 +515,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>assertSuccessCreationSimplePrincipal</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.282">assertSuccessCreationSimplePrincipal</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.284">assertSuccessCreationSimplePrincipal</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal,
                                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;password)</pre>
 </li>
 </ul>
@@ -521,7 +525,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createSaslRpcClientForKerberos</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.security.HBaseSaslRpcClient&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.292">createSaslRpcClientForKerberos</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal)
+<pre>private&nbsp;org.apache.hadoop.hbase.security.HBaseSaslRpcClient&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.294">createSaslRpcClientForKerberos</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal)
                                                                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -535,7 +539,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createTokenMockWithCredentials</h4>
-<pre>private&nbsp;org.apache.hadoop.security.token.Token&lt;? extends org.apache.hadoop.security.token.TokenIdentifier&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.299">createTokenMockWithCredentials</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal,
+<pre>private&nbsp;org.apache.hadoop.security.token.Token&lt;? extends org.apache.hadoop.security.token.TokenIdentifier&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.301">createTokenMockWithCredentials</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal,
                                                                                                                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;password)
                                                                                                                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -550,7 +554,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createSaslRpcClientSimple</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.security.HBaseSaslRpcClient&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.310">createSaslRpcClientSimple</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal,
+<pre>private&nbsp;org.apache.hadoop.hbase.security.HBaseSaslRpcClient&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.312">createSaslRpcClientSimple</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;principal,
                                                                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;password)
                                                                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -562,10 +566,24 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <a name="createTokenMock--">
 <!--   -->
 </a>
-<ul class="blockListLast">
+<ul class="blockList">
 <li class="blockList">
 <h4>createTokenMock</h4>
-<pre>private&nbsp;org.apache.hadoop.security.token.Token&lt;? extends org.apache.hadoop.security.token.TokenIdentifier&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.318">createTokenMock</a>()</pre>
+<pre>private&nbsp;org.apache.hadoop.security.token.Token&lt;? extends org.apache.hadoop.security.token.TokenIdentifier&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.320">createTokenMock</a>()</pre>
+</li>
+</ul>
+<a name="testFailedEvaluateResponse--">
+<!--   -->
+</a>
+<ul class="blockListLast">
+<li class="blockList">
+<h4>testFailedEvaluateResponse</h4>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html#line.325">testFailedEvaluateResponse</a>()
+                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<dl>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
+</dl>
 </li>
 </ul>
 </li>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html
index e51e0d4..0064c42 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html
@@ -254,8 +254,8 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.Verify.Counts.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestBigLinkedList.Verify.Counts</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.Generator.Counts.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestBigLinkedList.Generator.Counts</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.Counters.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestWithCellVisibilityLoadAndVerify.Counters</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.Generator.Counts.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestBigLinkedList.Generator.Counts</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.Counters.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestLoadAndVerify.Counters</span></a></li>
 </ul>
 </li>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html b/testdevapidocs/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html
index 3e8daa6..36793e1 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 <li class="blockList">
 <hr>
 <br>
-<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.89">TestWALSplitToHFile</a>
+<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.87">TestWALSplitToHFile</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -346,7 +346,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CLASS_RULE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.91">CLASS_RULE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.89">CLASS_RULE</a></pre>
 </li>
 </ul>
 <a name="LOG">
@@ -355,7 +355,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.94">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.92">LOG</a></pre>
 </li>
 </ul>
 <a name="UTIL">
@@ -364,7 +364,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>UTIL</h4>
-<pre>static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.95">UTIL</a></pre>
+<pre>static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.93">UTIL</a></pre>
 </li>
 </ul>
 <a name="ee">
@@ -373,7 +373,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>ee</h4>
-<pre>private final&nbsp;org.apache.hadoop.hbase.util.EnvironmentEdge <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.96">ee</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.hbase.util.EnvironmentEdge <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.94">ee</a></pre>
 </li>
 </ul>
 <a name="rootDir">
@@ -382,7 +382,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>rootDir</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.97">rootDir</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.95">rootDir</a></pre>
 </li>
 </ul>
 <a name="logName">
@@ -391,7 +391,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>logName</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.98">logName</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.96">logName</a></pre>
 </li>
 </ul>
 <a name="oldLogDir">
@@ -400,7 +400,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>oldLogDir</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.99">oldLogDir</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.97">oldLogDir</a></pre>
 </li>
 </ul>
 <a name="logDir">
@@ -409,7 +409,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>logDir</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.100">logDir</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.Path <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.98">logDir</a></pre>
 </li>
 </ul>
 <a name="fs">
@@ -418,7 +418,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>fs</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.101">fs</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.99">fs</a></pre>
 </li>
 </ul>
 <a name="conf">
@@ -427,7 +427,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>private&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.102">conf</a></pre>
+<pre>private&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.100">conf</a></pre>
 </li>
 </ul>
 <a name="wals">
@@ -436,7 +436,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>wals</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.wal.WALFactory <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.103">wals</a></pre>
+<pre>private&nbsp;org.apache.hadoop.hbase.wal.WALFactory <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.101">wals</a></pre>
 </li>
 </ul>
 <a name="ROW">
@@ -445,7 +445,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>ROW</h4>
-<pre>private static final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.105">ROW</a></pre>
+<pre>private static final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.103">ROW</a></pre>
 </li>
 </ul>
 <a name="QUALIFIER">
@@ -454,7 +454,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>QUALIFIER</h4>
-<pre>private static final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.106">QUALIFIER</a></pre>
+<pre>private static final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.104">QUALIFIER</a></pre>
 </li>
 </ul>
 <a name="VALUE1">
@@ -463,7 +463,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>VALUE1</h4>
-<pre>private static final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.107">VALUE1</a></pre>
+<pre>private static final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.105">VALUE1</a></pre>
 </li>
 </ul>
 <a name="VALUE2">
@@ -472,7 +472,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>VALUE2</h4>
-<pre>private static final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.108">VALUE2</a></pre>
+<pre>private static final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.106">VALUE2</a></pre>
 </li>
 </ul>
 <a name="countPerFamily">
@@ -481,7 +481,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>countPerFamily</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.109">countPerFamily</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.107">countPerFamily</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.wal.TestWALSplitToHFile.countPerFamily">Constant Field Values</a></dd>
@@ -494,7 +494,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TEST_NAME</h4>
-<pre>public final&nbsp;org.junit.rules.TestName <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.112">TEST_NAME</a></pre>
+<pre>public final&nbsp;org.junit.rules.TestName <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.110">TEST_NAME</a></pre>
 </li>
 </ul>
 </li>
@@ -511,7 +511,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TestWALSplitToHFile</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.89">TestWALSplitToHFile</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.87">TestWALSplitToHFile</a>()</pre>
 </li>
 </ul>
 </li>
@@ -528,7 +528,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setUpBeforeClass</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.115">setUpBeforeClass</a>()
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.113">setUpBeforeClass</a>()
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -542,7 +542,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tearDownAfterClass</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.125">tearDownAfterClass</a>()
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.123">tearDownAfterClass</a>()
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -556,7 +556,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setUp</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.130">setUp</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.128">setUp</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -570,7 +570,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tearDown</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.148">tearDown</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.146">tearDown</a>()
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -584,7 +584,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteDir</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.156">deleteDir</a>(org.apache.hadoop.fs.Path&nbsp;p)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.154">deleteDir</a>(org.apache.hadoop.fs.Path&nbsp;p)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -598,7 +598,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createBasic3FamilyTD</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.client.TableDescriptor&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.164">createBasic3FamilyTD</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName)
+<pre>private&nbsp;org.apache.hadoop.hbase.client.TableDescriptor&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.162">createBasic3FamilyTD</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName)
                                                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -612,7 +612,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createWAL</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.wal.WAL&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.174">createWAL</a>(org.apache.hadoop.conf.Configuration&nbsp;c,
+<pre>private&nbsp;org.apache.hadoop.hbase.wal.WAL&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.172">createWAL</a>(org.apache.hadoop.conf.Configuration&nbsp;c,
                                                   org.apache.hadoop.fs.Path&nbsp;hbaseRootDir,
                                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;logName)
                                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -628,7 +628,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createWAL</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.wal.WAL&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.180">createWAL</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>private&nbsp;org.apache.hadoop.hbase.wal.WAL&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.178">createWAL</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                                   org.apache.hadoop.fs.Path&nbsp;hbaseRootDir,
                                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;logName)
                                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -644,7 +644,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setupTableAndRegion</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.util.Pair&lt;org.apache.hadoop.hbase.client.TableDescriptor,org.apache.hadoop.hbase.client.RegionInfo&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.186">setupTableAndRegion</a>()
+<pre>private&nbsp;org.apache.hadoop.hbase.util.Pair&lt;org.apache.hadoop.hbase.client.TableDescriptor,org.apache.hadoop.hbase.client.RegionInfo&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.184">setupTableAndRegion</a>()
                                                                                                                                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -658,7 +658,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>writeData</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.198">writeData</a>(org.apache.hadoop.hbase.client.TableDescriptor&nbsp;td,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.196">writeData</a>(org.apache.hadoop.hbase.client.TableDescriptor&nbsp;td,
                        org.apache.hadoop.hbase.regionserver.HRegion&nbsp;region)
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -673,7 +673,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testDifferentRootDirAndWALRootDir</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.206">testDifferentRootDirAndWALRootDir</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.204">testDifferentRootDirAndWALRootDir</a>()
                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -687,7 +687,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testCorruptRecoveredHFile</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.244">testCorruptRecoveredHFile</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.242">testCorruptRecoveredHFile</a>()
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -701,7 +701,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testPutWithSameTimestamp</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.296">testPutWithSameTimestamp</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.294">testPutWithSameTimestamp</a>()
                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -715,7 +715,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testRecoverSequenceId</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.331">testRecoverSequenceId</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.329">testRecoverSequenceId</a>()
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -729,7 +729,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testWrittenViaHRegion</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.379">testWrittenViaHRegion</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.377">testWrittenViaHRegion</a>()
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/SecurityException.html?is-external=true" title="class or interface in java.lang">SecurityException</a>,
                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/IllegalArgumentException.html?is-external=true" title="class or interface in java.lang">IllegalArgumentException</a>,
@@ -751,7 +751,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testAfterPartialFlush</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.467">testAfterPartialFlush</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.465">testAfterPartialFlush</a>()
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/SecurityException.html?is-external=true" title="class or interface in java.lang">SecurityException</a>,
                                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/IllegalArgumentException.html?is-external=true" title="class or interface in java.lang">IllegalArgumentException</a></pre>
@@ -777,7 +777,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testAfterAbortingFlush</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.522">testAfterAbortingFlush</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.520">testAfterAbortingFlush</a>()
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Test that we could recover the data correctly after aborting flush. In the
  test, first we abort flush after writing some data, then writing more data
@@ -794,7 +794,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getScannedCount</h4>
-<pre>private&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.594">getScannedCount</a>(org.apache.hadoop.hbase.regionserver.RegionScanner&nbsp;scanner)
+<pre>private&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.592">getScannedCount</a>(org.apache.hadoop.hbase.regionserver.RegionScanner&nbsp;scanner)
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -808,7 +808,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>writeCorruptRecoveredHFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.610">writeCorruptRecoveredHFile</a>(org.apache.hadoop.fs.Path&nbsp;recoveredHFile)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html#line.608">writeCorruptRecoveredHFile</a>(org.apache.hadoop.fs.Path&nbsp;recoveredHFile)
                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html
index 7e15a4e..0855470 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.html
@@ -60,273 +60,304 @@
 <span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.52"></a>
 <span class="sourceLineNo">053</span>import org.apache.hadoop.io.DataInputBuffer;<a name="line.53"></a>
 <span class="sourceLineNo">054</span>import org.apache.hadoop.io.DataOutputBuffer;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.security.token.Token;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.security.token.TokenIdentifier;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.log4j.Level;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.log4j.Logger;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.junit.BeforeClass;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.junit.ClassRule;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.junit.Rule;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.junit.Test;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.junit.experimental.categories.Category;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.junit.rules.ExpectedException;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.mockito.Mockito;<a name="line.65"></a>
-<span class="sourceLineNo">066</span><a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hbase.thirdparty.com.google.common.base.Strings;<a name="line.67"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.io.WritableUtils;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.security.token.Token;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.security.token.TokenIdentifier;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.log4j.Level;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.log4j.Logger;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.junit.Assert;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.junit.BeforeClass;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.junit.ClassRule;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.junit.Rule;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.junit.Test;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.junit.experimental.categories.Category;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.junit.rules.ExpectedException;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.mockito.Mockito;<a name="line.67"></a>
 <span class="sourceLineNo">068</span><a name="line.68"></a>
-<span class="sourceLineNo">069</span>@Category({SecurityTests.class, SmallTests.class})<a name="line.69"></a>
-<span class="sourceLineNo">070</span>public class TestHBaseSaslRpcClient {<a name="line.70"></a>
-<span class="sourceLineNo">071</span><a name="line.71"></a>
-<span class="sourceLineNo">072</span>  @ClassRule<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      HBaseClassTestRule.forClass(TestHBaseSaslRpcClient.class);<a name="line.74"></a>
-<span class="sourceLineNo">075</span><a name="line.75"></a>
-<span class="sourceLineNo">076</span>  static {<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    System.setProperty("java.security.krb5.realm", "DOMAIN.COM");<a name="line.77"></a>
-<span class="sourceLineNo">078</span>    System.setProperty("java.security.krb5.kdc", "DOMAIN.COM");<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  }<a name="line.79"></a>
-<span class="sourceLineNo">080</span><a name="line.80"></a>
-<span class="sourceLineNo">081</span>  static final String DEFAULT_USER_NAME = "principal";<a name="line.81"></a>
-<span class="sourceLineNo">082</span>  static final String DEFAULT_USER_PASSWORD = "password";<a name="line.82"></a>
-<span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>  private static final Logger LOG = Logger.getLogger(TestHBaseSaslRpcClient.class);<a name="line.84"></a>
+<span class="sourceLineNo">069</span>import org.apache.hbase.thirdparty.com.google.common.base.Strings;<a name="line.69"></a>
+<span class="sourceLineNo">070</span><a name="line.70"></a>
+<span class="sourceLineNo">071</span>@Category({SecurityTests.class, SmallTests.class})<a name="line.71"></a>
+<span class="sourceLineNo">072</span>public class TestHBaseSaslRpcClient {<a name="line.72"></a>
+<span class="sourceLineNo">073</span><a name="line.73"></a>
+<span class="sourceLineNo">074</span>  @ClassRule<a name="line.74"></a>
+<span class="sourceLineNo">075</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.75"></a>
+<span class="sourceLineNo">076</span>      HBaseClassTestRule.forClass(TestHBaseSaslRpcClient.class);<a name="line.76"></a>
+<span class="sourceLineNo">077</span><a name="line.77"></a>
+<span class="sourceLineNo">078</span>  static {<a name="line.78"></a>
+<span class="sourceLineNo">079</span>    System.setProperty("java.security.krb5.realm", "DOMAIN.COM");<a name="line.79"></a>
+<span class="sourceLineNo">080</span>    System.setProperty("java.security.krb5.kdc", "DOMAIN.COM");<a name="line.80"></a>
+<span class="sourceLineNo">081</span>  }<a name="line.81"></a>
+<span class="sourceLineNo">082</span><a name="line.82"></a>
+<span class="sourceLineNo">083</span>  static final String DEFAULT_USER_NAME = "principal";<a name="line.83"></a>
+<span class="sourceLineNo">084</span>  static final String DEFAULT_USER_PASSWORD = "password";<a name="line.84"></a>
 <span class="sourceLineNo">085</span><a name="line.85"></a>
-<span class="sourceLineNo">086</span><a name="line.86"></a>
-<span class="sourceLineNo">087</span>  @Rule<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  public ExpectedException exception = ExpectedException.none();<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>  @BeforeClass<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  public static void before() {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    Logger.getRootLogger().setLevel(Level.DEBUG);<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  }<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  @Test<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  public void testSaslClientUsesGivenRpcProtection() throws Exception {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    Token&lt;? extends TokenIdentifier&gt; token = createTokenMockWithCredentials(DEFAULT_USER_NAME,<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        DEFAULT_USER_PASSWORD);<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    DigestSaslClientAuthenticationProvider provider = new DigestSaslClientAuthenticationProvider();<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    for (SaslUtil.QualityOfProtection qop : SaslUtil.QualityOfProtection.values()) {<a name="line.100"></a>
-<span class="sourceLineNo">101</span>      String negotiatedQop = new HBaseSaslRpcClient(HBaseConfiguration.create(), provider, token,<a name="line.101"></a>
-<span class="sourceLineNo">102</span>          Mockito.mock(InetAddress.class), Mockito.mock(SecurityInfo.class), false, qop.name(),<a name="line.102"></a>
-<span class="sourceLineNo">103</span>          false) {<a name="line.103"></a>
-<span class="sourceLineNo">104</span>        public String getQop() {<a name="line.104"></a>
-<span class="sourceLineNo">105</span>          return saslProps.get(Sasl.QOP);<a name="line.105"></a>
-<span class="sourceLineNo">106</span>        }<a name="line.106"></a>
-<span class="sourceLineNo">107</span>      }.getQop();<a name="line.107"></a>
-<span class="sourceLineNo">108</span>      assertEquals(negotiatedQop, qop.getSaslQop());<a name="line.108"></a>
-<span class="sourceLineNo">109</span>    }<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  }<a name="line.110"></a>
-<span class="sourceLineNo">111</span><a name="line.111"></a>
-<span class="sourceLineNo">112</span>  @Test<a name="line.112"></a>
-<span class="sourceLineNo">113</span>  public void testDigestSaslClientCallbackHandler() throws UnsupportedCallbackException {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    final Token&lt;? extends TokenIdentifier&gt; token = createTokenMock();<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));<a name="line.116"></a>
-<span class="sourceLineNo">117</span><a name="line.117"></a>
-<span class="sourceLineNo">118</span>    final NameCallback nameCallback = mock(NameCallback.class);<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    final PasswordCallback passwordCallback = mock(PasswordCallback.class);<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    final RealmCallback realmCallback = mock(RealmCallback.class);<a name="line.120"></a>
-<span class="sourceLineNo">121</span><a name="line.121"></a>
-<span class="sourceLineNo">122</span>    // We can provide a realmCallback, but HBase presently does nothing with it.<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    Callback[] callbackArray = {nameCallback, passwordCallback, realmCallback};<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    final DigestSaslClientCallbackHandler saslClCallbackHandler =<a name="line.124"></a>
-<span class="sourceLineNo">125</span>        new DigestSaslClientCallbackHandler(token);<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    saslClCallbackHandler.handle(callbackArray);<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    verify(nameCallback).setName(anyString());<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    verify(passwordCallback).setPassword(any());<a name="line.128"></a>
-<span class="sourceLineNo">129</span>  }<a name="line.129"></a>
-<span class="sourceLineNo">130</span><a name="line.130"></a>
-<span class="sourceLineNo">131</span>  @Test<a name="line.131"></a>
-<span class="sourceLineNo">132</span>  public void testDigestSaslClientCallbackHandlerWithException() {<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    final Token&lt;? extends TokenIdentifier&gt; token = createTokenMock();<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    final DigestSaslClientCallbackHandler saslClCallbackHandler =<a name="line.136"></a>
-<span class="sourceLineNo">137</span>        new DigestSaslClientCallbackHandler(token);<a name="line.137"></a>
-<span class="sourceLineNo">138</span>    try {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      saslClCallbackHandler.handle(new Callback[] { mock(TextOutputCallback.class) });<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    } catch (UnsupportedCallbackException expEx) {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      //expected<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    } catch (Exception ex) {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      fail("testDigestSaslClientCallbackHandlerWithException error : " + ex.getMessage());<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    }<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  @Test<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  public void testHBaseSaslRpcClientCreation() throws Exception {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    //creation kerberos principal check section<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    assertFalse(assertSuccessCreationKerberosPrincipal(null));<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    assertFalse(assertSuccessCreationKerberosPrincipal("DOMAIN.COM"));<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    assertFalse(assertSuccessCreationKerberosPrincipal("principal/DOMAIN.COM"));<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    if (!assertSuccessCreationKerberosPrincipal("principal/localhost@DOMAIN.COM")) {<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      // XXX: This can fail if kerberos support in the OS is not sane, see HBASE-10107.<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      // For now, don't assert, just warn<a name="line.155"></a>
-<span class="sourceLineNo">156</span>      LOG.warn("Could not create a SASL client with valid Kerberos credential");<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    }<a name="line.157"></a>
-<span class="sourceLineNo">158</span><a name="line.158"></a>
-<span class="sourceLineNo">159</span>    //creation digest principal check section<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    assertFalse(assertSuccessCreationDigestPrincipal(null, null));<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    assertFalse(assertSuccessCreationDigestPrincipal("", ""));<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    assertFalse(assertSuccessCreationDigestPrincipal("", null));<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    assertFalse(assertSuccessCreationDigestPrincipal(null, ""));<a name="line.163"></a>
-<span class="sourceLineNo">164</span>    assertTrue(assertSuccessCreationDigestPrincipal(DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>    //creation simple principal check section<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    assertFalse(assertSuccessCreationSimplePrincipal("", ""));<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    assertFalse(assertSuccessCreationSimplePrincipal(null, null));<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    assertFalse(assertSuccessCreationSimplePrincipal(DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));<a name="line.169"></a>
-<span class="sourceLineNo">170</span><a name="line.170"></a>
-<span class="sourceLineNo">171</span>    //exceptions check section<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    assertTrue(assertIOExceptionThenSaslClientIsNull(DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));<a name="line.172"></a>
-<span class="sourceLineNo">173</span>    assertTrue(assertIOExceptionWhenGetStreamsBeforeConnectCall(<a name="line.173"></a>
-<span class="sourceLineNo">174</span>        DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  }<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>  @Test<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  public void testAuthMethodReadWrite() throws IOException {<a name="line.178"></a>
-<span class="sourceLineNo">179</span>    DataInputBuffer in = new DataInputBuffer();<a name="line.179"></a>
-<span class="sourceLineNo">180</span>    DataOutputBuffer out = new DataOutputBuffer();<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>    assertAuthMethodRead(in, AuthMethod.SIMPLE);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    assertAuthMethodRead(in, AuthMethod.KERBEROS);<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    assertAuthMethodRead(in, AuthMethod.DIGEST);<a name="line.184"></a>
-<span class="sourceLineNo">185</span><a name="line.185"></a>
-<span class="sourceLineNo">186</span>    assertAuthMethodWrite(out, AuthMethod.SIMPLE);<a name="line.186"></a>
-<span class="sourceLineNo">187</span>    assertAuthMethodWrite(out, AuthMethod.KERBEROS);<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    assertAuthMethodWrite(out, AuthMethod.DIGEST);<a name="line.188"></a>
-<span class="sourceLineNo">189</span>  }<a name="line.189"></a>
-<span class="sourceLineNo">190</span><a name="line.190"></a>
-<span class="sourceLineNo">191</span>  private void assertAuthMethodRead(DataInputBuffer in, AuthMethod authMethod)<a name="line.191"></a>
-<span class="sourceLineNo">192</span>      throws IOException {<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    in.reset(new byte[] {authMethod.code}, 1);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    assertEquals(authMethod, AuthMethod.read(in));<a name="line.194"></a>
-<span class="sourceLineNo">195</span>  }<a name="line.195"></a>
-<span class="sourceLineNo">196</span><a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private void assertAuthMethodWrite(DataOutputBuffer out, AuthMethod authMethod)<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      throws IOException {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    authMethod.write(out);<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    assertEquals(authMethod.code, out.getData()[0]);<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    out.reset();<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  }<a name="line.202"></a>
-<span class="sourceLineNo">203</span><a name="line.203"></a>
-<span class="sourceLineNo">204</span>  private boolean assertIOExceptionWhenGetStreamsBeforeConnectCall(String principal,<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      String password) throws IOException {<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    boolean inState = false;<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    boolean outState = false;<a name="line.207"></a>
-<span class="sourceLineNo">208</span><a name="line.208"></a>
-<span class="sourceLineNo">209</span>    DigestSaslClientAuthenticationProvider provider = new DigestSaslClientAuthenticationProvider() {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      @Override<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      public SaslClient createClient(Configuration conf, InetAddress serverAddress,<a name="line.211"></a>
-<span class="sourceLineNo">212</span>          SecurityInfo securityInfo, Token&lt;? extends TokenIdentifier&gt; token,<a name="line.212"></a>
-<span class="sourceLineNo">213</span>          boolean fallbackAllowed, Map&lt;String, String&gt; saslProps) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        return Mockito.mock(SaslClient.class);<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      }<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    };<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    HBaseSaslRpcClient rpcClient = new HBaseSaslRpcClient(HBaseConfiguration.create(), provider,<a name="line.217"></a>
-<span class="sourceLineNo">218</span>        createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class),<a name="line.218"></a>
-<span class="sourceLineNo">219</span>        Mockito.mock(SecurityInfo.class), false);<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>    try {<a name="line.221"></a>
-<span class="sourceLineNo">222</span>      rpcClient.getInputStream();<a name="line.222"></a>
-<span class="sourceLineNo">223</span>    } catch(IOException ex) {<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      //Sasl authentication exchange hasn't completed yet<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      inState = true;<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    }<a name="line.226"></a>
-<span class="sourceLineNo">227</span><a name="line.227"></a>
-<span class="sourceLineNo">228</span>    try {<a name="line.228"></a>
-<span class="sourceLineNo">229</span>      rpcClient.getOutputStream();<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    } catch(IOException ex) {<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      //Sasl authentication exchange hasn't completed yet<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      outState = true;<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    }<a name="line.233"></a>
-<span class="sourceLineNo">234</span><a name="line.234"></a>
-<span class="sourceLineNo">235</span>    return inState &amp;&amp; outState;<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  }<a name="line.236"></a>
-<span class="sourceLineNo">237</span><a name="line.237"></a>
-<span class="sourceLineNo">238</span>  private boolean assertIOExceptionThenSaslClientIsNull(String principal, String password) {<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    try {<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      DigestSaslClientAuthenticationProvider provider =<a name="line.240"></a>
-<span class="sourceLineNo">241</span>          new DigestSaslClientAuthenticationProvider() {<a name="line.241"></a>
-<span class="sourceLineNo">242</span>        @Override<a name="line.242"></a>
-<span class="sourceLineNo">243</span>        public SaslClient createClient(Configuration conf, InetAddress serverAddress,<a name="line.243"></a>
-<span class="sourceLineNo">244</span>            SecurityInfo securityInfo,<a name="line.244"></a>
-<span class="sourceLineNo">245</span>            Token&lt;? extends TokenIdentifier&gt; token, boolean fallbackAllowed,<a name="line.245"></a>
-<span class="sourceLineNo">246</span>            Map&lt;String, String&gt; saslProps) {<a name="line.246"></a>
-<span class="sourceLineNo">247</span>          return null;<a name="line.247"></a>
-<span class="sourceLineNo">248</span>        }<a name="line.248"></a>
-<span class="sourceLineNo">249</span>      };<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      new HBaseSaslRpcClient(HBaseConfiguration.create(), provider,<a name="line.250"></a>
-<span class="sourceLineNo">251</span>          createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class),<a name="line.251"></a>
-<span class="sourceLineNo">252</span>          Mockito.mock(SecurityInfo.class), false);<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      return false;<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    } catch (IOException ex) {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      return true;<a name="line.255"></a>
-<span class="sourceLineNo">256</span>    }<a name="line.256"></a>
-<span class="sourceLineNo">257</span>  }<a name="line.257"></a>
-<span class="sourceLineNo">258</span><a name="line.258"></a>
-<span class="sourceLineNo">259</span>  private boolean assertSuccessCreationKerberosPrincipal(String principal) {<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    HBaseSaslRpcClient rpcClient = null;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    try {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      rpcClient = createSaslRpcClientForKerberos(principal);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    } catch(Exception ex) {<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      LOG.error(ex.getMessage(), ex);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    }<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    return rpcClient != null;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>  }<a name="line.267"></a>
-<span class="sourceLineNo">268</span><a name="line.268"></a>
-<span class="sourceLineNo">269</span>  private boolean assertSuccessCreationDigestPrincipal(String principal, String password) {<a name="line.269"></a>
-<span class="sourceLineNo">270</span>    HBaseSaslRpcClient rpcClient = null;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    try {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      rpcClient = new HBaseSaslRpcClient(HBaseConfiguration.create(),<a name="line.272"></a>
-<span class="sourceLineNo">273</span>          new DigestSaslClientAuthenticationProvider(),<a name="line.273"></a>
-<span class="sourceLineNo">274</span>          createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class),<a name="line.274"></a>
-<span class="sourceLineNo">275</span>          Mockito.mock(SecurityInfo.class), false);<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    } catch(Exception ex) {<a name="line.276"></a>
-<span class="sourceLineNo">277</span>      LOG.error(ex.getMessage(), ex);<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    }<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    return rpcClient != null;<a name="line.279"></a>
-<span class="sourceLineNo">280</span>  }<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>  private boolean assertSuccessCreationSimplePrincipal(String principal, String password) {<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    HBaseSaslRpcClient rpcClient = null;<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    try {<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      rpcClient = createSaslRpcClientSimple(principal, password);<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    } catch(Exception ex) {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>      LOG.error(ex.getMessage(), ex);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    }<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    return rpcClient != null;<a name="line.289"></a>
-<span class="sourceLineNo">290</span>  }<a name="line.290"></a>
-<span class="sourceLineNo">291</span><a name="line.291"></a>
-<span class="sourceLineNo">292</span>  private HBaseSaslRpcClient createSaslRpcClientForKerberos(String principal)<a name="line.292"></a>
-<span class="sourceLineNo">293</span>      throws IOException {<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return new HBaseSaslRpcClient(HBaseConfiguration.create(),<a name="line.294"></a>
-<span class="sourceLineNo">295</span>        new GssSaslClientAuthenticationProvider(), createTokenMock(),<a name="line.295"></a>
-<span class="sourceLineNo">296</span>        Mockito.mock(InetAddress.class), Mockito.mock(SecurityInfo.class), false);<a name="line.296"></a>
-<span class="sourceLineNo">297</span>  }<a name="line.297"></a>
-<span class="sourceLineNo">298</span><a name="line.298"></a>
-<span class="sourceLineNo">299</span>  private Token&lt;? extends TokenIdentifier&gt; createTokenMockWithCredentials(<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      String principal, String password)<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      throws IOException {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>    Token&lt;? extends TokenIdentifier&gt; token = createTokenMock();<a name="line.302"></a>
-<span class="sourceLineNo">303</span>    if (!Strings.isNullOrEmpty(principal) &amp;&amp; !Strings.isNullOrEmpty(password)) {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    return token;<a name="line.307"></a>
-<span class="sourceLineNo">308</span>  }<a name="line.308"></a>
-<span class="sourceLineNo">309</span><a name="line.309"></a>
-<span class="sourceLineNo">310</span>  private HBaseSaslRpcClient createSaslRpcClientSimple(String principal, String password)<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      throws IOException {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    return new HBaseSaslRpcClient(HBaseConfiguration.create(),<a name="line.312"></a>
-<span class="sourceLineNo">313</span>        new SimpleSaslClientAuthenticationProvider(), createTokenMock(),<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        Mockito.mock(InetAddress.class), Mockito.mock(SecurityInfo.class), false);<a name="line.314"></a>
-<span class="sourceLineNo">315</span>  }<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>  @SuppressWarnings("unchecked")<a name="line.317"></a>
-<span class="sourceLineNo">318</span>  private Token&lt;? extends TokenIdentifier&gt; createTokenMock() {<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    return mock(Token.class);<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  }<a name="line.320"></a>
-<span class="sourceLineNo">321</span>}<a name="line.321"></a>
+<span class="sourceLineNo">086</span>  private static final Logger LOG = Logger.getLogger(TestHBaseSaslRpcClient.class);<a name="line.86"></a>
+<span class="sourceLineNo">087</span><a name="line.87"></a>
+<span class="sourceLineNo">088</span><a name="line.88"></a>
+<span class="sourceLineNo">089</span>  @Rule<a name="line.89"></a>
+<span class="sourceLineNo">090</span>  public ExpectedException exception = ExpectedException.none();<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>  @BeforeClass<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  public static void before() {<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    Logger.getRootLogger().setLevel(Level.DEBUG);<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  }<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>  @Test<a name="line.97"></a>
+<span class="sourceLineNo">098</span>  public void testSaslClientUsesGivenRpcProtection() throws Exception {<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    Token&lt;? extends TokenIdentifier&gt; token = createTokenMockWithCredentials(DEFAULT_USER_NAME,<a name="line.99"></a>
+<span class="sourceLineNo">100</span>        DEFAULT_USER_PASSWORD);<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    DigestSaslClientAuthenticationProvider provider = new DigestSaslClientAuthenticationProvider();<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    for (SaslUtil.QualityOfProtection qop : SaslUtil.QualityOfProtection.values()) {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>      String negotiatedQop = new HBaseSaslRpcClient(HBaseConfiguration.create(), provider, token,<a name="line.103"></a>
+<span class="sourceLineNo">104</span>          Mockito.mock(InetAddress.class), Mockito.mock(SecurityInfo.class), false, qop.name(),<a name="line.104"></a>
+<span class="sourceLineNo">105</span>          false) {<a name="line.105"></a>
+<span class="sourceLineNo">106</span>        public String getQop() {<a name="line.106"></a>
+<span class="sourceLineNo">107</span>          return saslProps.get(Sasl.QOP);<a name="line.107"></a>
+<span class="sourceLineNo">108</span>        }<a name="line.108"></a>
+<span class="sourceLineNo">109</span>      }.getQop();<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      assertEquals(negotiatedQop, qop.getSaslQop());<a name="line.110"></a>
+<span class="sourceLineNo">111</span>    }<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  }<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  @Test<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  public void testDigestSaslClientCallbackHandler() throws UnsupportedCallbackException {<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    final Token&lt;? extends TokenIdentifier&gt; token = createTokenMock();<a name="line.116"></a>
+<span class="sourceLineNo">117</span>    when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));<a name="line.118"></a>
+<span class="sourceLineNo">119</span><a name="line.119"></a>
+<span class="sourceLineNo">120</span>    final NameCallback nameCallback = mock(NameCallback.class);<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    final PasswordCallback passwordCallback = mock(PasswordCallback.class);<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    final RealmCallback realmCallback = mock(RealmCallback.class);<a name="line.122"></a>
+<span class="sourceLineNo">123</span><a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // We can provide a realmCallback, but HBase presently does nothing with it.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    Callback[] callbackArray = {nameCallback, passwordCallback, realmCallback};<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    final DigestSaslClientCallbackHandler saslClCallbackHandler =<a name="line.126"></a>
+<span class="sourceLineNo">127</span>        new DigestSaslClientCallbackHandler(token);<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    saslClCallbackHandler.handle(callbackArray);<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    verify(nameCallback).setName(anyString());<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    verify(passwordCallback).setPassword(any());<a name="line.130"></a>
+<span class="sourceLineNo">131</span>  }<a name="line.131"></a>
+<span class="sourceLineNo">132</span><a name="line.132"></a>
+<span class="sourceLineNo">133</span>  @Test<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  public void testDigestSaslClientCallbackHandlerWithException() {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    final Token&lt;? extends TokenIdentifier&gt; token = createTokenMock();<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));<a name="line.137"></a>
+<span class="sourceLineNo">138</span>    final DigestSaslClientCallbackHandler saslClCallbackHandler =<a name="line.138"></a>
+<span class="sourceLineNo">139</span>        new DigestSaslClientCallbackHandler(token);<a name="line.139"></a>
+<span class="sourceLineNo">140</span>    try {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>      saslClCallbackHandler.handle(new Callback[] { mock(TextOutputCallback.class) });<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    } catch (UnsupportedCallbackException expEx) {<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      //expected<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    } catch (Exception ex) {<a name="line.144"></a>
+<span class="sourceLineNo">145</span>      fail("testDigestSaslClientCallbackHandlerWithException error : " + ex.getMessage());<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    }<a name="line.146"></a>
+<span class="sourceLineNo">147</span>  }<a name="line.147"></a>
+<span class="sourceLineNo">148</span><a name="line.148"></a>
+<span class="sourceLineNo">149</span>  @Test<a name="line.149"></a>
+<span class="sourceLineNo">150</span>  public void testHBaseSaslRpcClientCreation() throws Exception {<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    //creation kerberos principal check section<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    assertFalse(assertSuccessCreationKerberosPrincipal(null));<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    assertFalse(assertSuccessCreationKerberosPrincipal("DOMAIN.COM"));<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    assertFalse(assertSuccessCreationKerberosPrincipal("principal/DOMAIN.COM"));<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    if (!assertSuccessCreationKerberosPrincipal("principal/localhost@DOMAIN.COM")) {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      // XXX: This can fail if kerberos support in the OS is not sane, see HBASE-10107.<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      // For now, don't assert, just warn<a name="line.157"></a>
+<span class="sourceLineNo">158</span>      LOG.warn("Could not create a SASL client with valid Kerberos credential");<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>    //creation digest principal check section<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    assertFalse(assertSuccessCreationDigestPrincipal(null, null));<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    assertFalse(assertSuccessCreationDigestPrincipal("", ""));<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    assertFalse(assertSuccessCreationDigestPrincipal("", null));<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    assertFalse(assertSuccessCreationDigestPrincipal(null, ""));<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    assertTrue(assertSuccessCreationDigestPrincipal(DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));<a name="line.166"></a>
+<span class="sourceLineNo">167</span><a name="line.167"></a>
+<span class="sourceLineNo">168</span>    //creation simple principal check section<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    assertFalse(assertSuccessCreationSimplePrincipal("", ""));<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    assertFalse(assertSuccessCreationSimplePrincipal(null, null));<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    assertFalse(assertSuccessCreationSimplePrincipal(DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));<a name="line.171"></a>
+<span class="sourceLineNo">172</span><a name="line.172"></a>
+<span class="sourceLineNo">173</span>    //exceptions check section<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    assertTrue(assertIOExceptionThenSaslClientIsNull(DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    assertTrue(assertIOExceptionWhenGetStreamsBeforeConnectCall(<a name="line.175"></a>
+<span class="sourceLineNo">176</span>        DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));<a name="line.176"></a>
+<span class="sourceLineNo">177</span>  }<a name="line.177"></a>
+<span class="sourceLineNo">178</span><a name="line.178"></a>
+<span class="sourceLineNo">179</span>  @Test<a name="line.179"></a>
+<span class="sourceLineNo">180</span>  public void testAuthMethodReadWrite() throws IOException {<a name="line.180"></a>
+<span class="sourceLineNo">181</span>    DataInputBuffer in = new DataInputBuffer();<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    DataOutputBuffer out = new DataOutputBuffer();<a name="line.182"></a>
+<span class="sourceLineNo">183</span><a name="line.183"></a>
+<span class="sourceLineNo">184</span>    assertAuthMethodRead(in, AuthMethod.SIMPLE);<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    assertAuthMethodRead(in, AuthMethod.KERBEROS);<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    assertAuthMethodRead(in, AuthMethod.DIGEST);<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>    assertAuthMethodWrite(out, AuthMethod.SIMPLE);<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    assertAuthMethodWrite(out, AuthMethod.KERBEROS);<a name="line.189"></a>
+<span class="sourceLineNo">190</span>    assertAuthMethodWrite(out, AuthMethod.DIGEST);<a name="line.190"></a>
+<span class="sourceLineNo">191</span>  }<a name="line.191"></a>
+<span class="sourceLineNo">192</span><a name="line.192"></a>
+<span class="sourceLineNo">193</span>  private void assertAuthMethodRead(DataInputBuffer in, AuthMethod authMethod)<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      throws IOException {<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    in.reset(new byte[] {authMethod.code}, 1);<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    assertEquals(authMethod, AuthMethod.read(in));<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  }<a name="line.197"></a>
+<span class="sourceLineNo">198</span><a name="line.198"></a>
+<span class="sourceLineNo">199</span>  private void assertAuthMethodWrite(DataOutputBuffer out, AuthMethod authMethod)<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      throws IOException {<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    authMethod.write(out);<a name="line.201"></a>
+<span class="sourceLineNo">202</span>    assertEquals(authMethod.code, out.getData()[0]);<a name="line.202"></a>
+<span class="sourceLineNo">203</span>    out.reset();<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  }<a name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>  private boolean assertIOExceptionWhenGetStreamsBeforeConnectCall(String principal,<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      String password) throws IOException {<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    boolean inState = false;<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    boolean outState = false;<a name="line.209"></a>
+<span class="sourceLineNo">210</span><a name="line.210"></a>
+<span class="sourceLineNo">211</span>    DigestSaslClientAuthenticationProvider provider = new DigestSaslClientAuthenticationProvider() {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      @Override<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      public SaslClient createClient(Configuration conf, InetAddress serverAddress,<a name="line.213"></a>
+<span class="sourceLineNo">214</span>          SecurityInfo securityInfo, Token&lt;? extends TokenIdentifier&gt; token,<a name="line.214"></a>
+<span class="sourceLineNo">215</span>          boolean fallbackAllowed, Map&lt;String, String&gt; saslProps) {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>        return Mockito.mock(SaslClient.class);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      }<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    };<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    HBaseSaslRpcClient rpcClient = new HBaseSaslRpcClient(HBaseConfiguration.create(), provider,<a name="line.219"></a>
+<span class="sourceLineNo">220</span>        createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class),<a name="line.220"></a>
+<span class="sourceLineNo">221</span>        Mockito.mock(SecurityInfo.class), false);<a name="line.221"></a>
+<span class="sourceLineNo">222</span><a name="line.222"></a>
+<span class="sourceLineNo">223</span>    try {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>      rpcClient.getInputStream();<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    } catch(IOException ex) {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>      //Sasl authentication exchange hasn't completed yet<a name="line.226"></a>
+<span class="sourceLineNo">227</span>      inState = true;<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    }<a name="line.228"></a>
+<span class="sourceLineNo">229</span><a name="line.229"></a>
+<span class="sourceLineNo">230</span>    try {<a name="line.230"></a>
+<span class="sourceLineNo">231</span>      rpcClient.getOutputStream();<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    } catch(IOException ex) {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      //Sasl authentication exchange hasn't completed yet<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      outState = true;<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    }<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>    return inState &amp;&amp; outState;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>  }<a name="line.238"></a>
+<span class="sourceLineNo">239</span><a name="line.239"></a>
+<span class="sourceLineNo">240</span>  private boolean assertIOExceptionThenSaslClientIsNull(String principal, String password) {<a name="line.240"></a>
+<span class="sourceLineNo">241</span>    try {<a name="line.241"></a>
+<span class="sourceLineNo">242</span>      DigestSaslClientAuthenticationProvider provider =<a name="line.242"></a>
+<span class="sourceLineNo">243</span>          new DigestSaslClientAuthenticationProvider() {<a name="line.243"></a>
+<span class="sourceLineNo">244</span>        @Override<a name="line.244"></a>
+<span class="sourceLineNo">245</span>        public SaslClient createClient(Configuration conf, InetAddress serverAddress,<a name="line.245"></a>
+<span class="sourceLineNo">246</span>            SecurityInfo securityInfo,<a name="line.246"></a>
+<span class="sourceLineNo">247</span>            Token&lt;? extends TokenIdentifier&gt; token, boolean fallbackAllowed,<a name="line.247"></a>
+<span class="sourceLineNo">248</span>            Map&lt;String, String&gt; saslProps) {<a name="line.248"></a>
+<span class="sourceLineNo">249</span>          return null;<a name="line.249"></a>
+<span class="sourceLineNo">250</span>        }<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      };<a name="line.251"></a>
+<span class="sourceLineNo">252</span>      new HBaseSaslRpcClient(HBaseConfiguration.create(), provider,<a name="line.252"></a>
+<span class="sourceLineNo">253</span>          createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class),<a name="line.253"></a>
+<span class="sourceLineNo">254</span>          Mockito.mock(SecurityInfo.class), false);<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      return false;<a name="line.255"></a>
+<span class="sourceLineNo">256</span>    } catch (IOException ex) {<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      return true;<a name="line.257"></a>
+<span class="sourceLineNo">258</span>    }<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  }<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  private boolean assertSuccessCreationKerberosPrincipal(String principal) {<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    HBaseSaslRpcClient rpcClient = null;<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    try {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      rpcClient = createSaslRpcClientForKerberos(principal);<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    } catch(Exception ex) {<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      LOG.error(ex.getMessage(), ex);<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    }<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    return rpcClient != null;<a name="line.268"></a>
+<span class="sourceLineNo">269</span>  }<a name="line.269"></a>
+<span class="sourceLineNo">270</span><a name="line.270"></a>
+<span class="sourceLineNo">271</span>  private boolean assertSuccessCreationDigestPrincipal(String principal, String password) {<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    HBaseSaslRpcClient rpcClient = null;<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    try {<a name="line.273"></a>
+<span class="sourceLineNo">274</span>      rpcClient = new HBaseSaslRpcClient(HBaseConfiguration.create(),<a name="line.274"></a>
+<span class="sourceLineNo">275</span>          new DigestSaslClientAuthenticationProvider(),<a name="line.275"></a>
+<span class="sourceLineNo">276</span>          createTokenMockWithCredentials(principal, password), Mockito.mock(InetAddress.class),<a name="line.276"></a>
+<span class="sourceLineNo">277</span>          Mockito.mock(SecurityInfo.class), false);<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    } catch(Exception ex) {<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      LOG.error(ex.getMessage(), ex);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    }<a name="line.280"></a>
+<span class="sourceLineNo">281</span>    return rpcClient != null;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>  }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>  private boolean assertSuccessCreationSimplePrincipal(String principal, String password) {<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    HBaseSaslRpcClient rpcClient = null;<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    try {<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      rpcClient = createSaslRpcClientSimple(principal, password);<a name="line.287"></a>
+<span class="sourceLineNo">288</span>    } catch(Exception ex) {<a name="line.288"></a>
+<span class="sourceLineNo">289</span>      LOG.error(ex.getMessage(), ex);<a name="line.289"></a>
+<span class="sourceLineNo">290</span>    }<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    return rpcClient != null;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  }<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>  private HBaseSaslRpcClient createSaslRpcClientForKerberos(String principal)<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      throws IOException {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>    return new HBaseSaslRpcClient(HBaseConfiguration.create(),<a name="line.296"></a>
+<span class="sourceLineNo">297</span>        new GssSaslClientAuthenticationProvider(), createTokenMock(),<a name="line.297"></a>
+<span class="sourceLineNo">298</span>        Mockito.mock(InetAddress.class), Mockito.mock(SecurityInfo.class), false);<a name="line.298"></a>
+<span class="sourceLineNo">299</span>  }<a name="line.299"></a>
+<span class="sourceLineNo">300</span><a name="line.300"></a>
+<span class="sourceLineNo">301</span>  private Token&lt;? extends TokenIdentifier&gt; createTokenMockWithCredentials(<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      String principal, String password)<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      throws IOException {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    Token&lt;? extends TokenIdentifier&gt; token = createTokenMock();<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    if (!Strings.isNullOrEmpty(principal) &amp;&amp; !Strings.isNullOrEmpty(password)) {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>      when(token.getIdentifier()).thenReturn(Bytes.toBytes(DEFAULT_USER_NAME));<a name="line.306"></a>
+<span class="sourceLineNo">307</span>      when(token.getPassword()).thenReturn(Bytes.toBytes(DEFAULT_USER_PASSWORD));<a name="line.307"></a>
+<span class="sourceLineNo">308</span>    }<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    return token;<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  private HBaseSaslRpcClient createSaslRpcClientSimple(String principal, String password)<a name="line.312"></a>
+<span class="sourceLineNo">313</span>      throws IOException {<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    return new HBaseSaslRpcClient(HBaseConfiguration.create(),<a name="line.314"></a>
+<span class="sourceLineNo">315</span>        new SimpleSaslClientAuthenticationProvider(), createTokenMock(),<a name="line.315"></a>
+<span class="sourceLineNo">316</span>        Mockito.mock(InetAddress.class), Mockito.mock(SecurityInfo.class), false);<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  @SuppressWarnings("unchecked")<a name="line.319"></a>
+<span class="sourceLineNo">320</span>  private Token&lt;? extends TokenIdentifier&gt; createTokenMock() {<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    return mock(Token.class);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>  }<a name="line.322"></a>
+<span class="sourceLineNo">323</span><a name="line.323"></a>
+<span class="sourceLineNo">324</span>  @Test(expected = IOException.class)<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   public void testFailedEvaluateResponse() throws IOException {<a name="line.325"></a>
+<span class="sourceLineNo">326</span>    //prep mockin the SaslClient<a name="line.326"></a>
+<span class="sourceLineNo">327</span>    SimpleSaslClientAuthenticationProvider mockProvider =<a name="line.327"></a>
+<span class="sourceLineNo">328</span>      Mockito.mock(SimpleSaslClientAuthenticationProvider.class);<a name="line.328"></a>
+<span class="sourceLineNo">329</span>    SaslClient mockClient = Mockito.mock(SaslClient.class);<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    Assert.assertNotNull(mockProvider);<a name="line.330"></a>
+<span class="sourceLineNo">331</span>    Assert.assertNotNull(mockClient);<a name="line.331"></a>
+<span class="sourceLineNo">332</span>    Mockito.when(mockProvider.createClient(Mockito.any(), Mockito.any(), Mockito.any(),<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      Mockito.any(), Mockito.anyBoolean(), Mockito.any())).thenReturn(mockClient);<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    HBaseSaslRpcClient rpcClient = new HBaseSaslRpcClient(HBaseConfiguration.create(),<a name="line.334"></a>
+<span class="sourceLineNo">335</span>      mockProvider, createTokenMock(),<a name="line.335"></a>
+<span class="sourceLineNo">336</span>      Mockito.mock(InetAddress.class), Mockito.mock(SecurityInfo.class), false);<a name="line.336"></a>
+<span class="sourceLineNo">337</span><a name="line.337"></a>
+<span class="sourceLineNo">338</span>    //simulate getting an error from a failed saslServer.evaluateResponse<a name="line.338"></a>
+<span class="sourceLineNo">339</span>    DataOutputBuffer errorBuffer = new DataOutputBuffer();<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    errorBuffer.writeInt(SaslStatus.ERROR.state);<a name="line.340"></a>
+<span class="sourceLineNo">341</span>    WritableUtils.writeString(errorBuffer, IOException.class.getName());<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    WritableUtils.writeString(errorBuffer, "Invalid Token");<a name="line.342"></a>
+<span class="sourceLineNo">343</span><a name="line.343"></a>
+<span class="sourceLineNo">344</span>    DataInputBuffer in = new DataInputBuffer();<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    in.reset(errorBuffer.getData(), 0, errorBuffer.getLength());<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    DataOutputBuffer out = new DataOutputBuffer();<a name="line.346"></a>
+<span class="sourceLineNo">347</span><a name="line.347"></a>
+<span class="sourceLineNo">348</span>    //simulate that authentication exchange has completed quickly after sending the token<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    Mockito.when(mockClient.isComplete()).thenReturn(true);<a name="line.349"></a>
+<span class="sourceLineNo">350</span>    rpcClient.saslConnect(in, out);<a name="line.350"></a>
+<span class="sourceLineNo">351</span>  }<a name="line.351"></a>
+<span class="sourceLineNo">352</span>}<a name="line.352"></a>
 
 
 
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html
index 3630a81..62d2f3c 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.html
@@ -7,631 +7,629 @@
 <body>
 <div class="sourceContainer">
 <pre><span class="sourceLineNo">001</span>/*<a name="line.1"></a>
-<span class="sourceLineNo">002</span> *<a name="line.2"></a>
-<span class="sourceLineNo">003</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.3"></a>
-<span class="sourceLineNo">004</span> * or more contributor license agreements.  See the NOTICE file<a name="line.4"></a>
-<span class="sourceLineNo">005</span> * distributed with this work for additional information<a name="line.5"></a>
-<span class="sourceLineNo">006</span> * regarding copyright ownership.  The ASF licenses this file<a name="line.6"></a>
-<span class="sourceLineNo">007</span> * to you under the Apache License, Version 2.0 (the<a name="line.7"></a>
-<span class="sourceLineNo">008</span> * "License"); you may not use this file except in compliance<a name="line.8"></a>
-<span class="sourceLineNo">009</span> * with the License.  You may obtain a copy of the License at<a name="line.9"></a>
-<span class="sourceLineNo">010</span> *<a name="line.10"></a>
-<span class="sourceLineNo">011</span> *     http://www.apache.org/licenses/LICENSE-2.0<a name="line.11"></a>
-<span class="sourceLineNo">012</span> *<a name="line.12"></a>
-<span class="sourceLineNo">013</span> * Unless required by applicable law or agreed to in writing, software<a name="line.13"></a>
-<span class="sourceLineNo">014</span> * distributed under the License is distributed on an "AS IS" BASIS,<a name="line.14"></a>
-<span class="sourceLineNo">015</span> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<a name="line.15"></a>
-<span class="sourceLineNo">016</span> * See the License for the specific language governing permissions and<a name="line.16"></a>
-<span class="sourceLineNo">017</span> * limitations under the License.<a name="line.17"></a>
-<span class="sourceLineNo">018</span> */<a name="line.18"></a>
-<span class="sourceLineNo">019</span>package org.apache.hadoop.hbase.wal;<a name="line.19"></a>
-<span class="sourceLineNo">020</span><a name="line.20"></a>
-<span class="sourceLineNo">021</span>import static org.apache.hadoop.hbase.regionserver.wal.AbstractTestWALReplay.addRegionEdits;<a name="line.21"></a>
-<span class="sourceLineNo">022</span>import static org.apache.hadoop.hbase.wal.BoundedRecoveredHFilesOutputSink.WAL_SPLIT_TO_HFILE;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import static org.junit.Assert.assertEquals;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import static org.junit.Assert.assertNotNull;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import static org.junit.Assert.assertTrue;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import static org.junit.Assert.fail;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import static org.mockito.Mockito.when;<a name="line.27"></a>
-<span class="sourceLineNo">028</span><a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.io.IOException;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import java.security.PrivilegedExceptionAction;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import java.util.ArrayList;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import java.util.Arrays;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import java.util.HashMap;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import java.util.List;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import java.util.Map;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.conf.Configuration;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileStatus;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.FileSystem;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.Path;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.Cell;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.HConstants;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.ServerName;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.TableName;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.client.Get;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.client.Put;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.client.Result;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.io.hfile.CorruptHFileException;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.regionserver.DefaultStoreEngine;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.regionserver.RegionScanner;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.regionserver.RegionServerServices;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractTestWALReplay;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.regionserver.wal.FSHLog;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.security.User;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.testclassification.RegionServerTests;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.util.CommonFSUtils;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.EnvironmentEdge;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.util.FSTableDescriptors;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.junit.After;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.junit.AfterClass;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.junit.Before;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.junit.BeforeClass;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.junit.ClassRule;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.junit.Rule;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.junit.Test;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.junit.experimental.categories.Category;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.junit.rules.TestName;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.mockito.Mockito;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.slf4j.Logger;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.slf4j.LoggerFactory;<a name="line.86"></a>
-<span class="sourceLineNo">087</span><a name="line.87"></a>
-<span class="sourceLineNo">088</span>@Category({ RegionServerTests.class, MediumTests.class })<a name="line.88"></a>
-<span class="sourceLineNo">089</span>public class TestWALSplitToHFile {<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  @ClassRule<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.91"></a>
-<span class="sourceLineNo">092</span>      HBaseClassTestRule.forClass(TestWALSplitToHFile.class);<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>  private static final Logger LOG = LoggerFactory.getLogger(AbstractTestWALReplay.class);<a name="line.94"></a>
-<span class="sourceLineNo">095</span>  static final HBaseTestingUtility UTIL = new HBaseTestingUtility();<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  private final EnvironmentEdge ee = EnvironmentEdgeManager.getDelegate();<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  private Path rootDir = null;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  private String logName;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  private Path oldLogDir;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  private Path logDir;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  private FileSystem fs;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  private Configuration conf;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  private WALFactory wals;<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  private static final byte[] ROW = Bytes.toBytes("row");<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  private static final byte[] QUALIFIER = Bytes.toBytes("q");<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  private static final byte[] VALUE1 = Bytes.toBytes("value1");<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  private static final byte[] VALUE2 = Bytes.toBytes("value2");<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  private static final int countPerFamily = 10;<a name="line.109"></a>
-<span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>  @Rule<a name="line.111"></a>
-<span class="sourceLineNo">112</span>  public final TestName TEST_NAME = new TestName();<a name="line.112"></a>
-<span class="sourceLineNo">113</span><a name="line.113"></a>
-<span class="sourceLineNo">114</span>  @BeforeClass<a name="line.114"></a>
-<span class="sourceLineNo">115</span>  public static void setUpBeforeClass() throws Exception {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    Configuration conf = UTIL.getConfiguration();<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    conf.setBoolean(WAL_SPLIT_TO_HFILE, true);<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    UTIL.startMiniCluster(3);<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    Path hbaseRootDir = UTIL.getDFSCluster().getFileSystem().makeQualified(new Path("/hbase"));<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    LOG.info("hbase.rootdir=" + hbaseRootDir);<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    CommonFSUtils.setRootDir(conf, hbaseRootDir);<a name="line.121"></a>
-<span class="sourceLineNo">122</span>  }<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span>  @AfterClass<a name="line.124"></a>
-<span class="sourceLineNo">125</span>  public static void tearDownAfterClass() throws Exception {<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    UTIL.shutdownMiniCluster();<a name="line.126"></a>
-<span class="sourceLineNo">127</span>  }<a name="line.127"></a>
-<span class="sourceLineNo">128</span><a name="line.128"></a>
-<span class="sourceLineNo">129</span>  @Before<a name="line.129"></a>
-<span class="sourceLineNo">130</span>  public void setUp() throws Exception {<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    this.conf = HBaseConfiguration.create(UTIL.getConfiguration());<a name="line.131"></a>
-<span class="sourceLineNo">132</span>    this.conf.setBoolean(HConstants.HREGION_EDITS_REPLAY_SKIP_ERRORS, false);<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    this.fs = UTIL.getDFSCluster().getFileSystem();<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    this.rootDir = CommonFSUtils.getRootDir(this.conf);<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    this.oldLogDir = new Path(this.rootDir, HConstants.HREGION_OLDLOGDIR_NAME);<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    String serverName =<a name="line.136"></a>
-<span class="sourceLineNo">137</span>        ServerName.valueOf(TEST_NAME.getMethodName() + "-manual", 16010, System.currentTimeMillis())<a name="line.137"></a>
-<span class="sourceLineNo">138</span>            .toString();<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    this.logName = AbstractFSWALProvider.getWALDirectoryName(serverName);<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    this.logDir = new Path(this.rootDir, logName);<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    if (UTIL.getDFSCluster().getFileSystem().exists(this.rootDir)) {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      UTIL.getDFSCluster().getFileSystem().delete(this.rootDir, true);<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    }<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    this.wals = new WALFactory(conf, TEST_NAME.getMethodName());<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  @After<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  public void tearDown() throws Exception {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    this.wals.close();<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    UTIL.getDFSCluster().getFileSystem().delete(this.rootDir, true);<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
-<span class="sourceLineNo">152</span><a name="line.152"></a>
-<span class="sourceLineNo">153</span>  /*<a name="line.153"></a>
-<span class="sourceLineNo">154</span>   * @param p Directory to cleanup<a name="line.154"></a>
-<span class="sourceLineNo">155</span>   */<a name="line.155"></a>
-<span class="sourceLineNo">156</span>  private void deleteDir(final Path p) throws IOException {<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    if (this.fs.exists(p)) {<a name="line.157"></a>
-<span class="sourceLineNo">158</span>      if (!this.fs.delete(p, true)) {<a name="line.158"></a>
-<span class="sourceLineNo">159</span>        throw new IOException("Failed remove of " + p);<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      }<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    }<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  }<a name="line.162"></a>
-<span class="sourceLineNo">163</span><a name="line.163"></a>
-<span class="sourceLineNo">164</span>  private TableDescriptor createBasic3FamilyTD(final TableName tableName) throws IOException {<a name="line.164"></a>
-<span class="sourceLineNo">165</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("a")).build());<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("b")).build());<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("c")).build());<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    TableDescriptor td = builder.build();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    UTIL.getAdmin().createTable(td);<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    return td;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>  }<a name="line.172"></a>
-<span class="sourceLineNo">173</span><a name="line.173"></a>
-<span class="sourceLineNo">174</span>  private WAL createWAL(Configuration c, Path hbaseRootDir, String logName) throws IOException {<a name="line.174"></a>
-<span class="sourceLineNo">175</span>    FSHLog wal = new FSHLog(FileSystem.get(c), hbaseRootDir, logName, c);<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    wal.init();<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    return wal;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  }<a name="line.178"></a>
-<span class="sourceLineNo">179</span><a name="line.179"></a>
-<span class="sourceLineNo">180</span>  private WAL createWAL(FileSystem fs, Path hbaseRootDir, String logName) throws IOException {<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    FSHLog wal = new FSHLog(fs, hbaseRootDir, logName, this.conf);<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    wal.init();<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    return wal;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>  }<a name="line.184"></a>
-<span class="sourceLineNo">185</span><a name="line.185"></a>
-<span class="sourceLineNo">186</span>  private Pair&lt;TableDescriptor, RegionInfo&gt; setupTableAndRegion() throws IOException {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>    final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    final TableDescriptor td = createBasic3FamilyTD(tableName);<a name="line.188"></a>
-<span class="sourceLineNo">189</span>    final RegionInfo ri = RegionInfoBuilder.newBuilder(tableName).build();<a name="line.189"></a>
-<span class="sourceLineNo">190</span>    final Path tableDir = CommonFSUtils.getTableDir(this.rootDir, tableName);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>    deleteDir(tableDir);<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    FSTableDescriptors.createTableDescriptorForTableDirectory(fs, tableDir, td, false);<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    HRegion region = HBaseTestingUtility.createRegionAndWAL(ri, rootDir, this.conf, td);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    HBaseTestingUtility.closeRegionAndWAL(region);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    return new Pair&lt;&gt;(td, ri);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>  }<a name="line.196"></a>
-<span class="sourceLineNo">197</span><a name="line.197"></a>
-<span class="sourceLineNo">198</span>  private void writeData(TableDescriptor td, HRegion region) throws IOException {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    final long timestamp = this.ee.currentTime();<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>      region.put(new Put(ROW).addColumn(cfd.getName(), QUALIFIER, timestamp, VALUE1));<a name="line.201"></a>
-<span class="sourceLineNo">202</span>    }<a name="line.202"></a>
-<span class="sourceLineNo">203</span>  }<a name="line.203"></a>
-<span class="sourceLineNo">204</span><a name="line.204"></a>
-<span class="sourceLineNo">205</span>  @Test<a name="line.205"></a>
-<span class="sourceLineNo">206</span>  public void testDifferentRootDirAndWALRootDir() throws Exception {<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    // Change wal root dir and reset the configuration<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    Path walRootDir = UTIL.createWALRootDir();<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    this.conf = HBaseConfiguration.create(UTIL.getConfiguration());<a name="line.209"></a>
-<span class="sourceLineNo">210</span><a name="line.210"></a>
-<span class="sourceLineNo">211</span>    FileSystem walFs = CommonFSUtils.getWALFileSystem(this.conf);<a name="line.211"></a>
-<span class="sourceLineNo">212</span>    this.oldLogDir = new Path(walRootDir, HConstants.HREGION_OLDLOGDIR_NAME);<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    String serverName =<a name="line.213"></a>
-<span class="sourceLineNo">214</span>        ServerName.valueOf(TEST_NAME.getMethodName() + "-manual", 16010, System.currentTimeMillis())<a name="line.214"></a>
-<span class="sourceLineNo">215</span>            .toString();<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    this.logName = AbstractFSWALProvider.getWALDirectoryName(serverName);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    this.logDir = new Path(walRootDir, logName);<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    this.wals = new WALFactory(conf, TEST_NAME.getMethodName());<a name="line.218"></a>
-<span class="sourceLineNo">219</span><a name="line.219"></a>
-<span class="sourceLineNo">220</span>    Pair&lt;TableDescriptor, RegionInfo&gt; pair = setupTableAndRegion();<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    TableDescriptor td = pair.getFirst();<a name="line.221"></a>
-<span class="sourceLineNo">222</span>    RegionInfo ri = pair.getSecond();<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>    WAL wal = createWAL(walFs, walRootDir, logName);<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    HRegion region = HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    writeData(td, region);<a name="line.226"></a>
-<span class="sourceLineNo">227</span><a name="line.227"></a>
-<span class="sourceLineNo">228</span>    // Now close the region without flush<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    region.close(true);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    wal.shutdown();<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    // split the log<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    WALSplitter.split(walRootDir, logDir, oldLogDir, FileSystem.get(this.conf), this.conf, wals);<a name="line.232"></a>
-<span class="sourceLineNo">233</span><a name="line.233"></a>
-<span class="sourceLineNo">234</span>    WAL wal2 = createWAL(walFs, walRootDir, logName);<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    HRegion region2 = HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal2);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    Result result2 = region2.get(new Get(ROW));<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    assertEquals(td.getColumnFamilies().length, result2.size());<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      assertTrue(Bytes.equals(VALUE1, result2.getValue(cfd.getName(), QUALIFIER)));<a name="line.239"></a>
-<span class="sourceLineNo">240</span>    }<a name="line.240"></a>
-<span class="sourceLineNo">241</span>  }<a name="line.241"></a>
-<span class="sourceLineNo">242</span><a name="line.242"></a>
-<span class="sourceLineNo">243</span>  @Test<a name="line.243"></a>
-<span class="sourceLineNo">244</span>  public void testCorruptRecoveredHFile() throws Exception {<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    Pair&lt;TableDescriptor, RegionInfo&gt; pair = setupTableAndRegion();<a name="line.245"></a>
-<span class="sourceLineNo">246</span>    TableDescriptor td = pair.getFirst();<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    RegionInfo ri = pair.getSecond();<a name="line.247"></a>
-<span class="sourceLineNo">248</span><a name="line.248"></a>
-<span class="sourceLineNo">249</span>    WAL wal = createWAL(this.conf, rootDir, logName);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    HRegion region = HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal);<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    writeData(td, region);<a name="line.251"></a>
-<span class="sourceLineNo">252</span><a name="line.252"></a>
-<span class="sourceLineNo">253</span>    // Now close the region without flush<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    region.close(true);<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    wal.shutdown();<a name="line.255"></a>
-<span class="sourceLineNo">256</span>    // split the log<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    WALSplitter.split(rootDir, logDir, oldLogDir, FileSystem.get(this.conf), this.conf, wals);<a name="line.257"></a>
-<span class="sourceLineNo">258</span><a name="line.258"></a>
-<span class="sourceLineNo">259</span>    // Write a corrupt recovered hfile<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    Path regionDir =<a name="line.260"></a>
-<span class="sourceLineNo">261</span>        new Path(CommonFSUtils.getTableDir(rootDir, td.getTableName()), ri.getEncodedName());<a name="line.261"></a>
-<span class="sourceLineNo">262</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      FileStatus[] files =<a name="line.263"></a>
-<span class="sourceLineNo">264</span>          WALSplitUtil.getRecoveredHFiles(this.fs, regionDir, cfd.getNameAsString());<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      assertNotNull(files);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      assertTrue(files.length &gt; 0);<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      writeCorruptRecoveredHFile(files[0].getPath());<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
-<span class="sourceLineNo">269</span><a name="line.269"></a>
-<span class="sourceLineNo">270</span>    // Failed to reopen the region<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    WAL wal2 = createWAL(this.conf, rootDir, logName);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    try {<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal2);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      fail("Should fail to open region");<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    } catch (CorruptHFileException che) {<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      // Expected<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    }<a name="line.277"></a>
-<span class="sourceLineNo">278</span><a name="line.278"></a>
-<span class="sourceLineNo">279</span>    // Set skip errors to true and reopen the region<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    this.conf.setBoolean(HConstants.HREGION_EDITS_REPLAY_SKIP_ERRORS, true);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    HRegion region2 = HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal2);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    Result result2 = region2.get(new Get(ROW));<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    assertEquals(td.getColumnFamilies().length, result2.size());<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      assertTrue(Bytes.equals(VALUE1, result2.getValue(cfd.getName(), QUALIFIER)));<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      // Assert the corrupt file was skipped and still exist<a name="line.286"></a>
-<span class="sourceLineNo">287</span>      FileStatus[] files =<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          WALSplitUtil.getRecoveredHFiles(this.fs, regionDir, cfd.getNameAsString());<a name="line.288"></a>
-<span class="sourceLineNo">289</span>      assertNotNull(files);<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      assertEquals(1, files.length);<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      assertTrue(files[0].getPath().getName().contains("corrupt"));<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    }<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  }<a name="line.293"></a>
-<span class="sourceLineNo">294</span><a name="line.294"></a>
-<span class="sourceLineNo">295</span>  @Test<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  public void testPutWithSameTimestamp() throws Exception {<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    Pair&lt;TableDescriptor, RegionInfo&gt; pair = setupTableAndRegion();<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    TableDescriptor td = pair.getFirst();<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    RegionInfo ri = pair.getSecond();<a name="line.299"></a>
-<span class="sourceLineNo">300</span><a name="line.300"></a>
-<span class="sourceLineNo">301</span>    WAL wal = createWAL(this.conf, rootDir, logName);<a name="line.301"></a>
-<span class="sourceLineNo">302</span>    HRegion region = HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>    final long timestamp = this.ee.currentTime();<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    // Write data and flush<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>      region.put(new Put(ROW).addColumn(cfd.getName(), QUALIFIER, timestamp, VALUE1));<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    }<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    region.flush(true);<a name="line.308"></a>
-<span class="sourceLineNo">309</span><a name="line.309"></a>
-<span class="sourceLineNo">310</span>    // Write data with same timestamp and do not flush<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      region.put(new Put(ROW).addColumn(cfd.getName(), QUALIFIER, timestamp, VALUE2));<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    }<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    // Now close the region without flush<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    region.close(true);<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    wal.shutdown();<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    // split the log<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    WALSplitter.split(rootDir, logDir, oldLogDir, FileSystem.get(this.conf), this.conf, wals);<a name="line.318"></a>
-<span class="sourceLineNo">319</span><a name="line.319"></a>
-<span class="sourceLineNo">320</span>    // reopen the region<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    WAL wal2 = createWAL(this.conf, rootDir, logName);<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    HRegion region2 = HRegion.openHRegion(conf, this.fs, rootDir, ri, td, wal2);<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    Result result2 = region2.get(new Get(ROW));<a name="line.323"></a>
-<span class="sourceLineNo">324</span>    assertEquals(td.getColumnFamilies().length, result2.size());<a name="line.324"></a>
-<span class="sourceLineNo">325</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      assertTrue(Bytes.equals(VALUE2, result2.getValue(cfd.getName(), QUALIFIER)));<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    }<a name="line.327"></a>
-<span class="sourceLineNo">328</span>  }<a name="line.328"></a>
-<span class="sourceLineNo">329</span><a name="line.329"></a>
-<span class="sourceLineNo">330</span>  @Test<a name="line.330"></a>
-<span class="sourceLineNo">331</span>  public void testRecoverSequenceId() throws Exception {<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    Pair&lt;TableDescriptor, RegionInfo&gt; pair = setupTableAndRegion();<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    TableDescriptor td = pair.getFirst();<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    RegionInfo ri = pair.getSecond();<a name="line.334"></a>
-<span class="sourceLineNo">335</span><a name="line.335"></a>
-<span class="sourceLineNo">336</span>    WAL wal = createWAL(this.conf, rootDir, logName);<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    HRegion region = HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal);<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    Map&lt;Integer, Map&lt;String, Long&gt;&gt; seqIdMap = new HashMap&lt;&gt;();<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    // Write data and do not flush<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    for (int i = 0; i &lt; countPerFamily; i++) {<a name="line.340"></a>
-<span class="sourceLineNo">341</span>      for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>        region.put(new Put(Bytes.toBytes(i)).addColumn(cfd.getName(), QUALIFIER, VALUE1));<a name="line.342"></a>
-<span class="sourceLineNo">343</span>        Result result = region.get(new Get(Bytes.toBytes(i)).addFamily(cfd.getName()));<a name="line.343"></a>
-<span class="sourceLineNo">344</span>        assertTrue(Bytes.equals(VALUE1, result.getValue(cfd.getName(), QUALIFIER)));<a name="line.344"></a>
-<span class="sourceLineNo">345</span>        List&lt;Cell&gt; cells = result.listCells();<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        assertEquals(1, cells.size());<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        seqIdMap.computeIfAbsent(i, r -&gt; new HashMap&lt;&gt;()).put(cfd.getNameAsString(),<a name="line.347"></a>
-<span class="sourceLineNo">348</span>          cells.get(0).getSequenceId());<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      }<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    }<a name="line.350"></a>
-<span class="sourceLineNo">351</span><a name="line.351"></a>
-<span class="sourceLineNo">352</span>    // Now close the region without flush<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    region.close(true);<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    wal.shutdown();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    // split the log<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    WALSplitter.split(rootDir, logDir, oldLogDir, FileSystem.get(this.conf), this.conf, wals);<a name="line.356"></a>
-<span class="sourceLineNo">357</span><a name="line.357"></a>
-<span class="sourceLineNo">358</span>    // reopen the region<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    WAL wal2 = createWAL(this.conf, rootDir, logName);<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    HRegion region2 = HRegion.openHRegion(conf, this.fs, rootDir, ri, td, wal2);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    // assert the seqid was recovered<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    for (int i = 0; i &lt; countPerFamily; i++) {<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>        Result result = region2.get(new Get(Bytes.toBytes(i)).addFamily(cfd.getName()));<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        assertTrue(Bytes.equals(VALUE1, result.getValue(cfd.getName(), QUALIFIER)));<a name="line.365"></a>
-<span class="sourceLineNo">366</span>        List&lt;Cell&gt; cells = result.listCells();<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        assertEquals(1, cells.size());<a name="line.367"></a>
-<span class="sourceLineNo">368</span>        assertEquals((long) seqIdMap.get(i).get(cfd.getNameAsString()),<a name="line.368"></a>
-<span class="sourceLineNo">369</span>          cells.get(0).getSequenceId());<a name="line.369"></a>
-<span class="sourceLineNo">370</span>      }<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    }<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  }<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  /**<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   * Test writing edits into an HRegion, closing it, splitting logs, opening<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   * Region again.  Verify seqids.<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   */<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  @Test<a name="line.378"></a>
-<span class="sourceLineNo">379</span>  public void testWrittenViaHRegion()<a name="line.379"></a>
-<span class="sourceLineNo">380</span>      throws IOException, SecurityException, IllegalArgumentException, InterruptedException {<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    Pair&lt;TableDescriptor, RegionInfo&gt; pair = setupTableAndRegion();<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    TableDescriptor td = pair.getFirst();<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    RegionInfo ri = pair.getSecond();<a name="line.383"></a>
-<span class="sourceLineNo">384</span><a name="line.384"></a>
-<span class="sourceLineNo">385</span>    // Write countPerFamily edits into the three families.  Do a flush on one<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    // of the families during the load of edits so its seqid is not same as<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    // others to test we do right thing when different seqids.<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    WAL wal = createWAL(this.conf, rootDir, logName);<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    HRegion region = HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    long seqid = region.getOpenSeqNum();<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    boolean first = true;<a name="line.391"></a>
-<span class="sourceLineNo">392</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      addRegionEdits(ROW, cfd.getName(), countPerFamily, this.ee, region, "x");<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      if (first) {<a name="line.394"></a>
-<span class="sourceLineNo">395</span>        // If first, so we have at least one family w/ different seqid to rest.<a name="line.395"></a>
-<span class="sourceLineNo">396</span>        region.flush(true);<a name="line.396"></a>
-<span class="sourceLineNo">397</span>        first = false;<a name="line.397"></a>
-<span class="sourceLineNo">398</span>      }<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    }<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    // Now assert edits made it in.<a name="line.400"></a>
-<span class="sourceLineNo">401</span>    final Get g = new Get(ROW);<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    Result result = region.get(g);<a name="line.402"></a>
-<span class="sourceLineNo">403</span>    assertEquals(countPerFamily * td.getColumnFamilies().length, result.size());<a name="line.403"></a>
-<span class="sourceLineNo">404</span>    // Now close the region (without flush), split the log, reopen the region and assert that<a name="line.404"></a>
-<span class="sourceLineNo">405</span>    // replay of log has the correct effect, that our seqids are calculated correctly so<a name="line.405"></a>
-<span class="sourceLineNo">406</span>    // all edits in logs are seen as 'stale'/old.<a name="line.406"></a>
-<span class="sourceLineNo">407</span>    region.close(true);<a name="line.407"></a>
-<span class="sourceLineNo">408</span>    wal.shutdown();<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    try {<a name="line.409"></a>
-<span class="sourceLineNo">410</span>      WALSplitter.split(rootDir, logDir, oldLogDir, FileSystem.get(this.conf), this.conf, wals);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    } catch (Exception e) {<a name="line.411"></a>
-<span class="sourceLineNo">412</span>      LOG.debug("Got exception", e);<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    }<a name="line.413"></a>
-<span class="sourceLineNo">414</span><a name="line.414"></a>
-<span class="sourceLineNo">415</span>    WAL wal2 = createWAL(this.conf, rootDir, logName);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    HRegion region2 = HRegion.openHRegion(conf, this.fs, rootDir, ri, td, wal2);<a name="line.416"></a>
-<span class="sourceLineNo">417</span>    long seqid2 = region2.getOpenSeqNum();<a name="line.417"></a>
-<span class="sourceLineNo">418</span>    assertTrue(seqid + result.size() &lt; seqid2);<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    final Result result1b = region2.get(g);<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    assertEquals(result.size(), result1b.size());<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>    // Next test.  Add more edits, then 'crash' this region by stealing its wal<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    // out from under it and assert that replay of the log adds the edits back<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    // correctly when region is opened again.<a name="line.424"></a>
-<span class="sourceLineNo">425</span>    for (ColumnFamilyDescriptor hcd : td.getColumnFamilies()) {<a name="line.425"></a>
-<span class="sourceLineNo">426</span>      addRegionEdits(ROW, hcd.getName(), countPerFamily, this.ee, region2, "y");<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    }<a name="line.427"></a>
-<span class="sourceLineNo">428</span>    // Get count of edits.<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    final Result result2 = region2.get(g);<a name="line.429"></a>
-<span class="sourceLineNo">430</span>    assertEquals(2 * result.size(), result2.size());<a name="line.430"></a>
-<span class="sourceLineNo">431</span>    wal2.sync();<a name="line.431"></a>
-<span class="sourceLineNo">432</span>    final Configuration newConf = HBaseConfiguration.create(this.conf);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>    User user = HBaseTestingUtility.getDifferentUser(newConf, td.getTableName().getNameAsString());<a name="line.433"></a>
-<span class="sourceLineNo">434</span>    user.runAs(new PrivilegedExceptionAction&lt;Object&gt;() {<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      @Override<a name="line.435"></a>
-<span class="sourceLineNo">436</span>      public Object run() throws Exception {<a name="line.436"></a>
-<span class="sourceLineNo">437</span>        WALSplitter.split(rootDir, logDir, oldLogDir, FileSystem.get(conf), conf, wals);<a name="line.437"></a>
-<span class="sourceLineNo">438</span>        FileSystem newFS = FileSystem.get(newConf);<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        // Make a new wal for new region open.<a name="line.439"></a>
-<span class="sourceLineNo">440</span>        WAL wal3 = createWAL(newConf, rootDir, logName);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        Path tableDir = CommonFSUtils.getTableDir(rootDir, td.getTableName());<a name="line.441"></a>
-<span class="sourceLineNo">442</span>        HRegion region3 = new HRegion(tableDir, wal3, newFS, newConf, ri, td, null);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        long seqid3 = region3.initialize();<a name="line.443"></a>
-<span class="sourceLineNo">444</span>        Result result3 = region3.get(g);<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        // Assert that count of cells is same as before crash.<a name="line.445"></a>
-<span class="sourceLineNo">446</span>        assertEquals(result2.size(), result3.size());<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>        // I can't close wal1.  Its been appropriated when we split.<a name="line.448"></a>
-<span class="sourceLineNo">449</span>        region3.close();<a name="line.449"></a>
-<span class="sourceLineNo">450</span>        wal3.close();<a name="line.450"></a>
-<span class="sourceLineNo">451</span>        return null;<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      }<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    });<a name="line.453"></a>
-<span class="sourceLineNo">454</span>  }<a name="line.454"></a>
-<span class="sourceLineNo">455</span><a name="line.455"></a>
-<span class="sourceLineNo">456</span>  /**<a name="line.456"></a>
-<span class="sourceLineNo">457</span>   * Test that we recover correctly when there is a failure in between the<a name="line.457"></a>
-<span class="sourceLineNo">458</span>   * flushes. i.e. Some stores got flushed but others did not.<a name="line.458"></a>
-<span class="sourceLineNo">459</span>   * Unfortunately, there is no easy hook to flush at a store level. The way<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * we get around this is by flushing at the region level, and then deleting<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * the recently flushed store file for one of the Stores. This would put us<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   * back in the situation where all but that store got flushed and the region<a name="line.462"></a>
-<span class="sourceLineNo">463</span>   * died.<a name="line.463"></a>
-<span class="sourceLineNo">464</span>   * We restart Region again, and verify that the edits were replayed.<a name="line.464"></a>
-<span class="sourceLineNo">465</span>   */<a name="line.465"></a>
-<span class="sourceLineNo">466</span>  @Test<a name="line.466"></a>
-<span class="sourceLineNo">467</span>  public void testAfterPartialFlush()<a name="line.467"></a>
-<span class="sourceLineNo">468</span>      throws IOException, SecurityException, IllegalArgumentException {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>    Pair&lt;TableDescriptor, RegionInfo&gt; pair = setupTableAndRegion();<a name="line.469"></a>
-<span class="sourceLineNo">470</span>    TableDescriptor td = pair.getFirst();<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    RegionInfo ri = pair.getSecond();<a name="line.471"></a>
-<span class="sourceLineNo">472</span><a name="line.472"></a>
-<span class="sourceLineNo">473</span>    // Write countPerFamily edits into the three families.  Do a flush on one<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    // of the families during the load of edits so its seqid is not same as<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    // others to test we do right thing when different seqids.<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    WAL wal = createWAL(this.conf, rootDir, logName);<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    HRegion region = HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal);<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    long seqid = region.getOpenSeqNum();<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.479"></a>
-<span class="sourceLineNo">480</span>      addRegionEdits(ROW, cfd.getName(), countPerFamily, this.ee, region, "x");<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    }<a name="line.481"></a>
-<span class="sourceLineNo">482</span><a name="line.482"></a>
-<span class="sourceLineNo">483</span>    // Now assert edits made it in.<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    final Get g = new Get(ROW);<a name="line.484"></a>
-<span class="sourceLineNo">485</span>    Result result = region.get(g);<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    assertEquals(countPerFamily * td.getColumnFamilies().length, result.size());<a name="line.486"></a>
-<span class="sourceLineNo">487</span><a name="line.487"></a>
-<span class="sourceLineNo">488</span>    // Let us flush the region<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    region.flush(true);<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    region.close(true);<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    wal.shutdown();<a name="line.491"></a>
-<span class="sourceLineNo">492</span><a name="line.492"></a>
-<span class="sourceLineNo">493</span>    // delete the store files in the second column family to simulate a failure<a name="line.493"></a>
-<span class="sourceLineNo">494</span>    // in between the flushcache();<a name="line.494"></a>
-<span class="sourceLineNo">495</span>    // we have 3 families. killing the middle one ensures that taking the maximum<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    // will make us fail.<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    int cf_count = 0;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      cf_count++;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      if (cf_count == 2) {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>        region.getRegionFileSystem().deleteFamily(cfd.getNameAsString());<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      }<a name="line.502"></a>
-<span class="sourceLineNo">503</span>    }<a name="line.503"></a>
-<span class="sourceLineNo">504</span><a name="line.504"></a>
-<span class="sourceLineNo">505</span>    // Let us try to split and recover<a name="line.505"></a>
-<span class="sourceLineNo">506</span>    WALSplitter.split(rootDir, logDir, oldLogDir, FileSystem.get(this.conf), this.conf, wals);<a name="line.506"></a>
-<span class="sourceLineNo">507</span>    WAL wal2 = createWAL(this.conf, rootDir, logName);<a name="line.507"></a>
-<span class="sourceLineNo">508</span>    HRegion region2 = HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal2);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    long seqid2 = region2.getOpenSeqNum();<a name="line.509"></a>
-<span class="sourceLineNo">510</span>    assertTrue(seqid + result.size() &lt; seqid2);<a name="line.510"></a>
-<span class="sourceLineNo">511</span><a name="line.511"></a>
-<span class="sourceLineNo">512</span>    final Result result1b = region2.get(g);<a name="line.512"></a>
-<span class="sourceLineNo">513</span>    assertEquals(result.size(), result1b.size());<a name="line.513"></a>
-<span class="sourceLineNo">514</span>  }<a name="line.514"></a>
-<span class="sourceLineNo">515</span><a name="line.515"></a>
-<span class="sourceLineNo">516</span>  /**<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   * Test that we could recover the data correctly after aborting flush. In the<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * test, first we abort flush after writing some data, then writing more data<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   * and flush again, at last verify the data.<a name="line.519"></a>
-<span class="sourceLineNo">520</span>   */<a name="line.520"></a>
-<span class="sourceLineNo">521</span>  @Test<a name="line.521"></a>
-<span class="sourceLineNo">522</span>  public void testAfterAbortingFlush() throws IOException {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>    Pair&lt;TableDescriptor, RegionInfo&gt; pair = setupTableAndRegion();<a name="line.523"></a>
-<span class="sourceLineNo">524</span>    TableDescriptor td = pair.getFirst();<a name="line.524"></a>
-<span class="sourceLineNo">525</span>    RegionInfo ri = pair.getSecond();<a name="line.525"></a>
-<span class="sourceLineNo">526</span><a name="line.526"></a>
-<span class="sourceLineNo">527</span>    // Write countPerFamily edits into the three families. Do a flush on one<a name="line.527"></a>
-<span class="sourceLineNo">528</span>    // of the families during the load of edits so its seqid is not same as<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    // others to test we do right thing when different seqids.<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    WAL wal = createWAL(this.conf, rootDir, logName);<a name="line.530"></a>
-<span class="sourceLineNo">531</span>    RegionServerServices rsServices = Mockito.mock(RegionServerServices.class);<a name="line.531"></a>
-<span class="sourceLineNo">532</span>    Mockito.doReturn(false).when(rsServices).isAborted();<a name="line.532"></a>
-<span class="sourceLineNo">533</span>    when(rsServices.getServerName()).thenReturn(ServerName.valueOf("foo", 10, 10));<a name="line.533"></a>
-<span class="sourceLineNo">534</span>    when(rsServices.getConfiguration()).thenReturn(conf);<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    Configuration customConf = new Configuration(this.conf);<a name="line.535"></a>
-<span class="sourceLineNo">536</span>    customConf.set(DefaultStoreEngine.DEFAULT_STORE_FLUSHER_CLASS_KEY,<a name="line.536"></a>
-<span class="sourceLineNo">537</span>        AbstractTestWALReplay.CustomStoreFlusher.class.getName());<a name="line.537"></a>
-<span class="sourceLineNo">538</span>    HRegion region = HRegion.openHRegion(this.rootDir, ri, td, wal, customConf, rsServices, null);<a name="line.538"></a>
-<span class="sourceLineNo">539</span>    int writtenRowCount = 10;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    List&lt;ColumnFamilyDescriptor&gt; families = Arrays.asList(td.getColumnFamilies());<a name="line.540"></a>
-<span class="sourceLineNo">541</span>    for (int i = 0; i &lt; writtenRowCount; i++) {<a name="line.541"></a>
-<span class="sourceLineNo">542</span>      Put put = new Put(Bytes.toBytes(td.getTableName() + Integer.toString(i)));<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      put.addColumn(families.get(i % families.size()).getName(), Bytes.toBytes("q"),<a name="line.543"></a>
-<span class="sourceLineNo">544</span>          Bytes.toBytes("val"));<a name="line.544"></a>
-<span class="sourceLineNo">545</span>      region.put(put);<a name="line.545"></a>
-<span class="sourceLineNo">546</span>    }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>    // Now assert edits made it in.<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    RegionScanner scanner = region.getScanner(new Scan());<a name="line.549"></a>
-<span class="sourceLineNo">550</span>    assertEquals(writtenRowCount, getScannedCount(scanner));<a name="line.550"></a>
-<span class="sourceLineNo">551</span><a name="line.551"></a>
-<span class="sourceLineNo">552</span>    // Let us flush the region<a name="line.552"></a>
-<span class="sourceLineNo">553</span>    AbstractTestWALReplay.CustomStoreFlusher.throwExceptionWhenFlushing.set(true);<a name="line.553"></a>
-<span class="sourceLineNo">554</span>    try {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      region.flush(true);<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      fail("Injected exception hasn't been thrown");<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    } catch (IOException e) {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>      LOG.info("Expected simulated exception when flushing region, {}", e.getMessage());<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      // simulated to abort server<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      Mockito.doReturn(true).when(rsServices).isAborted();<a name="line.560"></a>
-<span class="sourceLineNo">561</span>      region.setClosing(false); // region normally does not accept writes after<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      // DroppedSnapshotException. We mock around it for this test.<a name="line.562"></a>
-<span class="sourceLineNo">563</span>    }<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    // writing more data<a name="line.564"></a>
-<span class="sourceLineNo">565</span>    int moreRow = 10;<a name="line.565"></a>
-<span class="sourceLineNo">566</span>    for (int i = writtenRowCount; i &lt; writtenRowCount + moreRow; i++) {<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      Put put = new Put(Bytes.toBytes(td.getTableName() + Integer.toString(i)));<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      put.addColumn(families.get(i % families.size()).getName(), Bytes.toBytes("q"),<a name="line.568"></a>
-<span class="sourceLineNo">569</span>          Bytes.toBytes("val"));<a name="line.569"></a>
-<span class="sourceLineNo">570</span>      region.put(put);<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    }<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    writtenRowCount += moreRow;<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    // call flush again<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    AbstractTestWALReplay.CustomStoreFlusher.throwExceptionWhenFlushing.set(false);<a name="line.574"></a>
-<span class="sourceLineNo">575</span>    try {<a name="line.575"></a>
-<span class="sourceLineNo">576</span>      region.flush(true);<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    } catch (IOException t) {<a name="line.577"></a>
-<span class="sourceLineNo">578</span>      LOG.info(<a name="line.578"></a>
-<span class="sourceLineNo">579</span>          "Expected exception when flushing region because server is stopped," + t.getMessage());<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    }<a name="line.580"></a>
-<span class="sourceLineNo">581</span><a name="line.581"></a>
-<span class="sourceLineNo">582</span>    region.close(true);<a name="line.582"></a>
-<span class="sourceLineNo">583</span>    wal.shutdown();<a name="line.583"></a>
-<span class="sourceLineNo">584</span><a name="line.584"></a>
-<span class="sourceLineNo">585</span>    // Let us try to split and recover<a name="line.585"></a>
-<span class="sourceLineNo">586</span>    WALSplitter.split(rootDir, logDir, oldLogDir, FileSystem.get(this.conf), this.conf, wals);<a name="line.586"></a>
-<span class="sourceLineNo">587</span>    WAL wal2 = createWAL(this.conf, rootDir, logName);<a name="line.587"></a>
-<span class="sourceLineNo">588</span>    Mockito.doReturn(false).when(rsServices).isAborted();<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    HRegion region2 = HRegion.openHRegion(this.rootDir, ri, td, wal2, this.conf, rsServices, null);<a name="line.589"></a>
-<span class="sourceLineNo">590</span>    scanner = region2.getScanner(new Scan());<a name="line.590"></a>
-<span class="sourceLineNo">591</span>    assertEquals(writtenRowCount, getScannedCount(scanner));<a name="line.591"></a>
-<span class="sourceLineNo">592</span>  }<a name="line.592"></a>
-<span class="sourceLineNo">593</span><a name="line.593"></a>
-<span class="sourceLineNo">594</span>  private int getScannedCount(RegionScanner scanner) throws IOException {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>    int scannedCount = 0;<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    List&lt;Cell&gt; results = new ArrayList&lt;&gt;();<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    while (true) {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>      boolean existMore = scanner.next(results);<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      if (!results.isEmpty()) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>        scannedCount++;<a name="line.600"></a>
-<span class="sourceLineNo">601</span>      }<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      if (!existMore) {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        break;<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      }<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      results.clear();<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    }<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    return scannedCount;<a name="line.607"></a>
-<span class="sourceLineNo">608</span>  }<a name="line.608"></a>
-<span class="sourceLineNo">609</span><a name="line.609"></a>
-<span class="sourceLineNo">610</span>  private void writeCorruptRecoveredHFile(Path recoveredHFile) throws Exception {<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    // Read the recovered hfile<a name="line.611"></a>
-<span class="sourceLineNo">612</span>    int fileSize = (int) fs.listStatus(recoveredHFile)[0].getLen();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    FSDataInputStream in = fs.open(recoveredHFile);<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    byte[] fileContent = new byte[fileSize];<a name="line.614"></a>
-<span class="sourceLineNo">615</span>    in.readFully(0, fileContent, 0, fileSize);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    in.close();<a name="line.616"></a>
-<span class="sourceLineNo">617</span><a name="line.617"></a>
-<span class="sourceLineNo">618</span>    // Write a corrupt hfile by append garbage<a name="line.618"></a>
-<span class="sourceLineNo">619</span>    Path path = new Path(recoveredHFile.getParent(), recoveredHFile.getName() + ".corrupt");<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    FSDataOutputStream out;<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    out = fs.create(path);<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    out.write(fileContent);<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    out.write(Bytes.toBytes("-----"));<a name="line.623"></a>
-<span class="sourceLineNo">624</span>    out.close();<a name="line.624"></a>
-<span class="sourceLineNo">625</span>  }<a name="line.625"></a>
-<span class="sourceLineNo">626</span>}<a name="line.626"></a>
+<span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a>
+<span class="sourceLineNo">003</span> * or more contributor license agreements.  See the NOTICE file<a name="line.3"></a>
+<span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a>
+<span class="sourceLineNo">005</span> * regarding copyright ownership.  The ASF licenses this file<a name="line.5"></a>
+<span class="sourceLineNo">006</span> * to you under the Apache License, Version 2.0 (the<a name="line.6"></a>
+<span class="sourceLineNo">007</span> * "License"); you may not use this file except in compliance<a name="line.7"></a>
+<span class="sourceLineNo">008</span> * with the License.  You may obtain a copy of the License at<a name="line.8"></a>
+<span class="sourceLineNo">009</span> *<a name="line.9"></a>
+<span class="sourceLineNo">010</span> *     http://www.apache.org/licenses/LICENSE-2.0<a name="line.10"></a>
+<span class="sourceLineNo">011</span> *<a name="line.11"></a>
+<span class="sourceLineNo">012</span> * Unless required by applicable law or agreed to in writing, software<a name="line.12"></a>
+<span class="sourceLineNo">013</span> * distributed under the License is distributed on an "AS IS" BASIS,<a name="line.13"></a>
+<span class="sourceLineNo">014</span> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<a name="line.14"></a>
+<span class="sourceLineNo">015</span> * See the License for the specific language governing permissions and<a name="line.15"></a>
+<span class="sourceLineNo">016</span> * limitations under the License.<a name="line.16"></a>
+<span class="sourceLineNo">017</span> */<a name="line.17"></a>
+<span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.wal;<a name="line.18"></a>
+<span class="sourceLineNo">019</span><a name="line.19"></a>
+<span class="sourceLineNo">020</span>import static org.apache.hadoop.hbase.regionserver.wal.AbstractTestWALReplay.addRegionEdits;<a name="line.20"></a>
+<span class="sourceLineNo">021</span>import static org.apache.hadoop.hbase.wal.WALSplitter.WAL_SPLIT_TO_HFILE;<a name="line.21"></a>
+<span class="sourceLineNo">022</span>import static org.junit.Assert.assertEquals;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import static org.junit.Assert.assertNotNull;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import static org.junit.Assert.assertTrue;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import static org.junit.Assert.fail;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import static org.mockito.Mockito.when;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import java.io.IOException;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import java.security.PrivilegedExceptionAction;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import java.util.ArrayList;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.Arrays;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import java.util.HashMap;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import java.util.List;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import java.util.Map;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.conf.Configuration;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.fs.FSDataInputStream;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.fs.FSDataOutputStream;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.fs.FileStatus;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.fs.FileSystem;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.Path;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.Cell;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.HConstants;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.ServerName;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.TableName;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.Get;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.client.Put;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.client.Result;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.io.hfile.CorruptHFileException;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.regionserver.DefaultStoreEngine;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.regionserver.RegionScanner;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.regionserver.RegionServerServices;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractTestWALReplay;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.regionserver.wal.FSHLog;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.security.User;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.testclassification.RegionServerTests;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.util.CommonFSUtils;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.util.EnvironmentEdge;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.util.FSTableDescriptors;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.junit.After;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.junit.AfterClass;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.junit.Before;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.junit.BeforeClass;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.junit.ClassRule;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.junit.Rule;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.junit.Test;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.junit.experimental.categories.Category;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.junit.rules.TestName;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.mockito.Mockito;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.slf4j.Logger;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.slf4j.LoggerFactory;<a name="line.84"></a>
+<span class="sourceLineNo">085</span><a name="line.85"></a>
+<span class="sourceLineNo">086</span>@Category({ RegionServerTests.class, MediumTests.class })<a name="line.86"></a>
+<span class="sourceLineNo">087</span>public class TestWALSplitToHFile {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  @ClassRule<a name="line.88"></a>
+<span class="sourceLineNo">089</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.89"></a>
+<span class="sourceLineNo">090</span>      HBaseClassTestRule.forClass(TestWALSplitToHFile.class);<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>  private static final Logger LOG = LoggerFactory.getLogger(AbstractTestWALReplay.class);<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  static final HBaseTestingUtility UTIL = new HBaseTestingUtility();<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  private final EnvironmentEdge ee = EnvironmentEdgeManager.getDelegate();<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  private Path rootDir = null;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  private String logName;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  private Path oldLogDir;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>  private Path logDir;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>  private FileSystem fs;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  private Configuration conf;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  private WALFactory wals;<a name="line.101"></a>
+<span class="sourceLineNo">102</span><a name="line.102"></a>
+<span class="sourceLineNo">103</span>  private static final byte[] ROW = Bytes.toBytes("row");<a name="line.103"></a>
+<span class="sourceLineNo">104</span>  private static final byte[] QUALIFIER = Bytes.toBytes("q");<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  private static final byte[] VALUE1 = Bytes.toBytes("value1");<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  private static final byte[] VALUE2 = Bytes.toBytes("value2");<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  private static final int countPerFamily = 10;<a name="line.107"></a>
+<span class="sourceLineNo">108</span><a name="line.108"></a>
+<span class="sourceLineNo">109</span>  @Rule<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  public final TestName TEST_NAME = new TestName();<a name="line.110"></a>
+<span class="sourceLineNo">111</span><a name="line.111"></a>
+<span class="sourceLineNo">112</span>  @BeforeClass<a name="line.112"></a>
+<span class="sourceLineNo">113</span>  public static void setUpBeforeClass() throws Exception {<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    Configuration conf = UTIL.getConfiguration();<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    conf.setBoolean(WAL_SPLIT_TO_HFILE, true);<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    UTIL.startMiniCluster(3);<a name="line.116"></a>
+<span class="sourceLineNo">117</span>    Path hbaseRootDir = UTIL.getDFSCluster().getFileSystem().makeQualified(new Path("/hbase"));<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    LOG.info("hbase.rootdir=" + hbaseRootDir);<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    CommonFSUtils.setRootDir(conf, hbaseRootDir);<a name="line.119"></a>
+<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  @AfterClass<a name="line.122"></a>
+<span class="sourceLineNo">123</span>  public static void tearDownAfterClass() throws Exception {<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    UTIL.shutdownMiniCluster();<a name="line.124"></a>
+<span class="sourceLineNo">125</span>  }<a name="line.125"></a>
+<span class="sourceLineNo">126</span><a name="line.126"></a>
+<span class="sourceLineNo">127</span>  @Before<a name="line.127"></a>
+<span class="sourceLineNo">128</span>  public void setUp() throws Exception {<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    this.conf = HBaseConfiguration.create(UTIL.getConfiguration());<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    this.conf.setBoolean(HConstants.HREGION_EDITS_REPLAY_SKIP_ERRORS, false);<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    this.fs = UTIL.getDFSCluster().getFileSystem();<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    this.rootDir = CommonFSUtils.getRootDir(this.conf);<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    this.oldLogDir = new Path(this.rootDir, HConstants.HREGION_OLDLOGDIR_NAME);<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    String serverName =<a name="line.134"></a>
+<span class="sourceLineNo">135</span>        ServerName.valueOf(TEST_NAME.getMethodName() + "-manual", 16010, System.currentTimeMillis())<a name="line.135"></a>
+<span class="sourceLineNo">136</span>            .toString();<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    this.logName = AbstractFSWALProvider.getWALDirectoryName(serverName);<a name="line.137"></a>
+<span class="sourceLineNo">138</span>    this.logDir = new Path(this.rootDir, logName);<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    if (UTIL.getDFSCluster().getFileSystem().exists(this.rootDir)) {<a name="line.139"></a>
+<span class="sourceLineNo">140</span>      UTIL.getDFSCluster().getFileSystem().delete(this.rootDir, true);<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    }<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    this.wals = new WALFactory(conf, TEST_NAME.getMethodName());<a name="line.142"></a>
+<span class="sourceLineNo">143</span>  }<a name="line.143"></a>
+<span class="sourceLineNo">144</span><a name="line.144"></a>
+<span class="sourceLineNo">145</span>  @After<a name="line.145"></a>
+<span class="sourceLineNo">146</span>  public void tearDown() throws Exception {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    this.wals.close();<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    UTIL.getDFSCluster().getFileSystem().delete(this.rootDir, true);<a name="line.148"></a>
+<span class="sourceLineNo">149</span>  }<a name="line.149"></a>
+<span class="sourceLineNo">150</span><a name="line.150"></a>
+<span class="sourceLineNo">151</span>  /*<a name="line.151"></a>
+<span class="sourceLineNo">152</span>   * @param p Directory to cleanup<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   */<a name="line.153"></a>
+<span class="sourceLineNo">154</span>  private void deleteDir(final Path p) throws IOException {<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    if (this.fs.exists(p)) {<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      if (!this.fs.delete(p, true)) {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>        throw new IOException("Failed remove of " + p);<a name="line.157"></a>
+<span class="sourceLineNo">158</span>      }<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    }<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  }<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>  private TableDescriptor createBasic3FamilyTD(final TableName tableName) throws IOException {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("a")).build());<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("b")).build());<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("c")).build());<a name="line.166"></a>
+<span class="sourceLineNo">167</span>    TableDescriptor td = builder.build();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    UTIL.getAdmin().createTable(td);<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    return td;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>  }<a name="line.170"></a>
+<span class="sourceLineNo">171</span><a name="line.171"></a>
+<span class="sourceLineNo">172</span>  private WAL createWAL(Configuration c, Path hbaseRootDir, String logName) throws IOException {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>    FSHLog wal = new FSHLog(FileSystem.get(c), hbaseRootDir, logName, c);<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    wal.init();<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    return wal;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  }<a name="line.176"></a>
+<span class="sourceLineNo">177</span><a name="line.177"></a>
+<span class="sourceLineNo">178</span>  private WAL createWAL(FileSystem fs, Path hbaseRootDir, String logName) throws IOException {<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    FSHLog wal = new FSHLog(fs, hbaseRootDir, logName, this.conf);<a name="line.179"></a>
+<span class="sourceLineNo">180</span>    wal.init();<a name="line.180"></a>
+<span class="sourceLineNo">181</span>    return wal;<a name="line.181"></a>
+<span class="sourceLineNo">182</span>  }<a name="line.182"></a>
+<span class="sourceLineNo">183</span><a name="line.183"></a>
+<span class="sourceLineNo">184</span>  private Pair&lt;TableDescriptor, RegionInfo&gt; setupTableAndRegion() throws IOException {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    final TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    final TableDescriptor td = createBasic3FamilyTD(tableName);<a name="line.186"></a>
+<span class="sourceLineNo">187</span>    final RegionInfo ri = RegionInfoBuilder.newBuilder(tableName).build();<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    final Path tableDir = CommonFSUtils.getTableDir(this.rootDir, tableName);<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    deleteDir(tableDir);<a name="line.189"></a>
+<span class="sourceLineNo">190</span>    FSTableDescriptors.createTableDescriptorForTableDirectory(fs, tableDir, td, false);<a name="line.190"></a>
+<span class="sourceLineNo">191</span>    HRegion region = HBaseTestingUtility.createRegionAndWAL(ri, rootDir, this.conf, td);<a name="line.191"></a>
+<span class="sourceLineNo">192</span>    HBaseTestingUtility.closeRegionAndWAL(region);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    return new Pair&lt;&gt;(td, ri);<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  }<a name="line.194"></a>
+<span class="sourceLineNo">195</span><a name="line.195"></a>
+<span class="sourceLineNo">196</span>  private void writeData(TableDescriptor td, HRegion region) throws IOException {<a name="line.196"></a>
+<span class="sourceLineNo">197</span>    final long timestamp = this.ee.currentTime();<a name="line.197"></a>
+<span class="sourceLineNo">198</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      region.put(new Put(ROW).addColumn(cfd.getName(), QUALIFIER, timestamp, VALUE1));<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    }<a name="line.200"></a>
+<span class="sourceLineNo">201</span>  }<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>  @Test<a name="line.203"></a>
+<span class="sourceLineNo">204</span>  public void testDifferentRootDirAndWALRootDir() throws Exception {<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    // Change wal root dir and reset the configuration<a name="line.205"></a>
+<span class="sourceLineNo">206</span>    Path walRootDir = UTIL.createWALRootDir();<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    this.conf = HBaseConfiguration.create(UTIL.getConfiguration());<a name="line.207"></a>
+<span class="sourceLineNo">208</span><a name="line.208"></a>
+<span class="sourceLineNo">209</span>    FileSystem walFs = CommonFSUtils.getWALFileSystem(this.conf);<a name="line.209"></a>
+<span class="sourceLineNo">210</span>    this.oldLogDir = new Path(walRootDir, HConstants.HREGION_OLDLOGDIR_NAME);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    String serverName =<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        ServerName.valueOf(TEST_NAME.getMethodName() + "-manual", 16010, System.currentTimeMillis())<a name="line.212"></a>
+<span class="sourceLineNo">213</span>            .toString();<a name="line.213"></a>
+<span class="sourceLineNo">214</span>    this.logName = AbstractFSWALProvider.getWALDirectoryName(serverName);<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    this.logDir = new Path(walRootDir, logName);<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    this.wals = new WALFactory(conf, TEST_NAME.getMethodName());<a name="line.216"></a>
+<span class="sourceLineNo">217</span><a name="line.217"></a>
+<span class="sourceLineNo">218</span>    Pair&lt;TableDescriptor, RegionInfo&gt; pair = setupTableAndRegion();<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    TableDescriptor td = pair.getFirst();<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    RegionInfo ri = pair.getSecond();<a name="line.220"></a>
+<span class="sourceLineNo">221</span><a name="line.221"></a>
+<span class="sourceLineNo">222</span>    WAL wal = createWAL(walFs, walRootDir, logName);<a name="line.222"></a>
+<span class="sourceLineNo">223</span>    HRegion region = HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal);<a name="line.223"></a>
+<span class="sourceLineNo">224</span>    writeData(td, region);<a name="line.224"></a>
+<span class="sourceLineNo">225</span><a name="line.225"></a>
+<span class="sourceLineNo">226</span>    // Now close the region without flush<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    region.close(true);<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    wal.shutdown();<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    // split the log<a name="line.229"></a>
+<span class="sourceLineNo">230</span>    WALSplitter.split(walRootDir, logDir, oldLogDir, FileSystem.get(this.conf), this.conf, wals);<a name="line.230"></a>
+<span class="sourceLineNo">231</span><a name="line.231"></a>
+<span class="sourceLineNo">232</span>    WAL wal2 = createWAL(walFs, walRootDir, logName);<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    HRegion region2 = HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal2);<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    Result result2 = region2.get(new Get(ROW));<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    assertEquals(td.getColumnFamilies().length, result2.size());<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.236"></a>
+<span class="sourceLineNo">237</span>      assertTrue(Bytes.equals(VALUE1, result2.getValue(cfd.getName(), QUALIFIER)));<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    }<a name="line.238"></a>
+<span class="sourceLineNo">239</span>  }<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>  @Test<a name="line.241"></a>
+<span class="sourceLineNo">242</span>  public void testCorruptRecoveredHFile() throws Exception {<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    Pair&lt;TableDescriptor, RegionInfo&gt; pair = setupTableAndRegion();<a name="line.243"></a>
+<span class="sourceLineNo">244</span>    TableDescriptor td = pair.getFirst();<a name="line.244"></a>
+<span class="sourceLineNo">245</span>    RegionInfo ri = pair.getSecond();<a name="line.245"></a>
+<span class="sourceLineNo">246</span><a name="line.246"></a>
+<span class="sourceLineNo">247</span>    WAL wal = createWAL(this.conf, rootDir, logName);<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    HRegion region = HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal);<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    writeData(td, region);<a name="line.249"></a>
+<span class="sourceLineNo">250</span><a name="line.250"></a>
+<span class="sourceLineNo">251</span>    // Now close the region without flush<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    region.close(true);<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    wal.shutdown();<a name="line.253"></a>
+<span class="sourceLineNo">254</span>    // split the log<a name="line.254"></a>
+<span class="sourceLineNo">255</span>    WALSplitter.split(rootDir, logDir, oldLogDir, FileSystem.get(this.conf), this.conf, wals);<a name="line.255"></a>
+<span class="sourceLineNo">256</span><a name="line.256"></a>
+<span class="sourceLineNo">257</span>    // Write a corrupt recovered hfile<a name="line.257"></a>
+<span class="sourceLineNo">258</span>    Path regionDir =<a name="line.258"></a>
+<span class="sourceLineNo">259</span>        new Path(CommonFSUtils.getTableDir(rootDir, td.getTableName()), ri.getEncodedName());<a name="line.259"></a>
+<span class="sourceLineNo">260</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.260"></a>
+<span class="sourceLineNo">261</span>      FileStatus[] files =<a name="line.261"></a>
+<span class="sourceLineNo">262</span>          WALSplitUtil.getRecoveredHFiles(this.fs, regionDir, cfd.getNameAsString());<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      assertNotNull(files);<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      assertTrue(files.length &gt; 0);<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      writeCorruptRecoveredHFile(files[0].getPath());<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    }<a name="line.266"></a>
+<span class="sourceLineNo">267</span><a name="line.267"></a>
+<span class="sourceLineNo">268</span>    // Failed to reopen the region<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    WAL wal2 = createWAL(this.conf, rootDir, logName);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    try {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>      HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal2);<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      fail("Should fail to open region");<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    } catch (CorruptHFileException che) {<a name="line.273"></a>
+<span class="sourceLineNo">274</span>      // Expected<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    }<a name="line.275"></a>
+<span class="sourceLineNo">276</span><a name="line.276"></a>
+<span class="sourceLineNo">277</span>    // Set skip errors to true and reopen the region<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    this.conf.setBoolean(HConstants.HREGION_EDITS_REPLAY_SKIP_ERRORS, true);<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    HRegion region2 = HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal2);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    Result result2 = region2.get(new Get(ROW));<a name="line.280"></a>
+<span class="sourceLineNo">281</span>    assertEquals(td.getColumnFamilies().length, result2.size());<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      assertTrue(Bytes.equals(VALUE1, result2.getValue(cfd.getName(), QUALIFIER)));<a name="line.283"></a>
+<span class="sourceLineNo">284</span>      // Assert the corrupt file was skipped and still exist<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      FileStatus[] files =<a name="line.285"></a>
+<span class="sourceLineNo">286</span>          WALSplitUtil.getRecoveredHFiles(this.fs, regionDir, cfd.getNameAsString());<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      assertNotNull(files);<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      assertEquals(1, files.length);<a name="line.288"></a>
+<span class="sourceLineNo">289</span>      assertTrue(files[0].getPath().getName().contains("corrupt"));<a name="line.289"></a>
+<span class="sourceLineNo">290</span>    }<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>  @Test<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  public void testPutWithSameTimestamp() throws Exception {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    Pair&lt;TableDescriptor, RegionInfo&gt; pair = setupTableAndRegion();<a name="line.295"></a>
+<span class="sourceLineNo">296</span>    TableDescriptor td = pair.getFirst();<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    RegionInfo ri = pair.getSecond();<a name="line.297"></a>
+<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">299</span>    WAL wal = createWAL(this.conf, rootDir, logName);<a name="line.299"></a>
+<span class="sourceLineNo">300</span>    HRegion region = HRegion.openHRegion(this.conf, this.fs, rootDir, ri, td, wal);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    final long timestamp = this.ee.currentTime();<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    // Write data and flush<a name="line.302"></a>
+<span class="sourceLineNo">303</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.303"></a>
+<span class="sourceLineNo">304</span>      region.put(new Put(ROW).addColumn(cfd.getName(), QUALIFIER, timestamp, VALUE1));<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    }<a name="line.305"></a>
+<span class="sourceLineNo">306</span>    region.flush(true);<a name="line.306"></a>
+<span class="sourceLineNo">307</span><a name="line.307"></a>
+<span class="sourceLineNo">308</span>    // Write data with same timestamp and do not flush<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>      region.put(new Put(ROW).addColumn(cfd.getName(), QUALIFIER, timestamp, VALUE2));<a name="line.310"></a>
+<span class="sourceLineNo">311</span>    }<a name="line.311"></a>
+<span class="sourceLineNo">312</span>    // Now close the region without flush<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    region.close(true);<a name="line.313"></a>
... 314 lines suppressed ...