Posted to commits@hbase.apache.org by gi...@apache.org on 2019/05/18 14:50:55 UTC

[hbase-site] branch asf-site updated: Published site at 930691a846c217976081a220f6560f94e03726a4.

This is an automated email from the ASF dual-hosted git repository.

git-site-role pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/hbase-site.git


The following commit(s) were added to refs/heads/asf-site by this push:
     new 26d2253  Published site at 930691a846c217976081a220f6560f94e03726a4.
26d2253 is described below

commit 26d22537706694d021c3aa1b1ced14d453be83f8
Author: jenkins <bu...@apache.org>
AuthorDate: Sat May 18 14:50:41 2019 +0000

    Published site at 930691a846c217976081a220f6560f94e03726a4.
---
 acid-semantics.html                                |    2 +-
 apache_hbase_reference_guide.pdf                   |    4 +-
 book.html                                          |    2 +-
 bulk-loads.html                                    |    2 +-
 checkstyle-aggregate.html                          |   10 +-
 coc.html                                           |    2 +-
 dependencies.html                                  |    2 +-
 dependency-convergence.html                        |    2 +-
 dependency-info.html                               |    2 +-
 dependency-management.html                         |    2 +-
 devapidocs/allclasses-frame.html                   |    6 -
 devapidocs/allclasses-noframe.html                 |    6 -
 devapidocs/constant-values.html                    |    4 +-
 devapidocs/index-all.html                          |   68 +-
 .../apache/hadoop/hbase/backup/package-tree.html   |    2 +-
 .../apache/hadoop/hbase/client/package-tree.html   |   24 +-
 .../apache/hadoop/hbase/executor/package-tree.html |    2 +-
 .../apache/hadoop/hbase/filter/package-tree.html   |    6 +-
 .../org/apache/hadoop/hbase/http/package-tree.html |    2 +-
 ...OutOneBlockAsyncDFSOutputHelper.BlockAdder.html |  243 ----
 ...OneBlockAsyncDFSOutputHelper.CancelOnClose.html |   16 +-
 ...eBlockAsyncDFSOutputHelper.ChecksumCreater.html |  226 ----
 ...BlockAsyncDFSOutputHelper.DFSClientAdaptor.html |    8 +-
 ...utOneBlockAsyncDFSOutputHelper.FileCreator.html |    6 +-
 ...tOneBlockAsyncDFSOutputHelper.LeaseManager.html |    6 +-
 ...lockAsyncDFSOutputHelper.NameNodeException.html |   10 +-
 ...anOutOneBlockAsyncDFSOutputHelper.PBHelper.html |  239 ----
 ...yncDFSOutputHelper.PipelineAckStatusGetter.html |  226 ----
 ...lockAsyncDFSOutputHelper.StorageTypeSetter.html |  228 ----
 .../FanOutOneBlockAsyncDFSOutputHelper.html        |  363 +----
 ...ockAsyncDFSOutputSaslHelper.DecryptHandler.html |    8 +-
 ...ockAsyncDFSOutputSaslHelper.EncryptHandler.html |   14 +-
 ...tOneBlockAsyncDFSOutputSaslHelper.PBHelper.html |  239 ----
 ...eBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html |    4 +-
 ...OutputSaslHelper.SaslClientCallbackHandler.html |   10 +-
 ...ncDFSOutputSaslHelper.SaslNegotiateHandler.html |   50 +-
 ...AsyncDFSOutputSaslHelper.SaslUnwrapHandler.html |   10 +-
 ...ckAsyncDFSOutputSaslHelper.SaslWrapHandler.html |   16 +-
 ...FSOutputSaslHelper.TransparentCryptoHelper.html |    4 +-
 .../FanOutOneBlockAsyncDFSOutputSaslHelper.html    |  119 +-
 ...OutOneBlockAsyncDFSOutputHelper.BlockAdder.html |  178 ---
 ...eBlockAsyncDFSOutputHelper.ChecksumCreater.html |  188 ---
 ...anOutOneBlockAsyncDFSOutputHelper.PBHelper.html |  178 ---
 ...yncDFSOutputHelper.PipelineAckStatusGetter.html |  186 ---
 ...lockAsyncDFSOutputHelper.StorageTypeSetter.html |  178 ---
 ...tOneBlockAsyncDFSOutputSaslHelper.PBHelper.html |  178 ---
 ...FSOutputSaslHelper.TransparentCryptoHelper.html |    4 +-
 .../hadoop/hbase/io/asyncfs/package-frame.html     |    6 -
 .../hadoop/hbase/io/asyncfs/package-summary.html   |   24 -
 .../hadoop/hbase/io/asyncfs/package-tree.html      |    6 -
 .../hadoop/hbase/io/asyncfs/package-use.html       |   18 -
 .../apache/hadoop/hbase/io/hfile/package-tree.html |    6 +-
 .../org/apache/hadoop/hbase/ipc/package-tree.html  |    2 +-
 .../hadoop/hbase/mapreduce/package-tree.html       |    4 +-
 .../hbase/master/assignment/package-tree.html      |    2 +-
 .../apache/hadoop/hbase/master/package-tree.html   |    6 +-
 .../hbase/master/procedure/package-tree.html       |    4 +-
 .../org/apache/hadoop/hbase/package-tree.html      |   20 +-
 .../hadoop/hbase/procedure2/package-tree.html      |    2 +-
 .../apache/hadoop/hbase/quotas/package-tree.html   |    4 +-
 .../hadoop/hbase/regionserver/package-tree.html    |   20 +-
 .../regionserver/querymatcher/package-tree.html    |    2 +-
 .../hbase/regionserver/wal/package-tree.html       |    4 +-
 .../hadoop/hbase/rest/model/package-tree.html      |    2 +-
 .../hadoop/hbase/security/access/package-tree.html |    2 +-
 .../apache/hadoop/hbase/security/package-tree.html |    2 +-
 .../apache/hadoop/hbase/thrift/package-tree.html   |    2 +-
 .../org/apache/hadoop/hbase/util/package-tree.html |   10 +-
 .../org/apache/hadoop/hbase/wal/package-tree.html  |    2 +-
 devapidocs/overview-tree.html                      |    6 -
 .../src-html/org/apache/hadoop/hbase/Version.html  |    4 +-
 ...OutOneBlockAsyncDFSOutputHelper.BlockAdder.html |  969 --------------
 ...OneBlockAsyncDFSOutputHelper.CancelOnClose.html | 1404 ++++++++------------
 ...eBlockAsyncDFSOutputHelper.ChecksumCreater.html |  969 --------------
 ...BlockAsyncDFSOutputHelper.DFSClientAdaptor.html | 1404 ++++++++------------
 ...utOneBlockAsyncDFSOutputHelper.FileCreator.html | 1404 ++++++++------------
 ...tOneBlockAsyncDFSOutputHelper.LeaseManager.html | 1404 ++++++++------------
 ...lockAsyncDFSOutputHelper.NameNodeException.html | 1404 ++++++++------------
 ...anOutOneBlockAsyncDFSOutputHelper.PBHelper.html |  969 --------------
 ...yncDFSOutputHelper.PipelineAckStatusGetter.html |  969 --------------
 ...lockAsyncDFSOutputHelper.StorageTypeSetter.html |  969 --------------
 .../FanOutOneBlockAsyncDFSOutputHelper.html        | 1404 ++++++++------------
 ...ockAsyncDFSOutputSaslHelper.DecryptHandler.html | 1320 +++++++++---------
 ...ockAsyncDFSOutputSaslHelper.EncryptHandler.html | 1320 +++++++++---------
 ...tOneBlockAsyncDFSOutputSaslHelper.PBHelper.html |  899 -------------
 ...eBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html | 1320 +++++++++---------
 ...OutputSaslHelper.SaslClientCallbackHandler.html | 1320 +++++++++---------
 ...ncDFSOutputSaslHelper.SaslNegotiateHandler.html | 1320 +++++++++---------
 ...AsyncDFSOutputSaslHelper.SaslUnwrapHandler.html | 1320 +++++++++---------
 ...ckAsyncDFSOutputSaslHelper.SaslWrapHandler.html | 1320 +++++++++---------
 ...FSOutputSaslHelper.TransparentCryptoHelper.html | 1320 +++++++++---------
 .../FanOutOneBlockAsyncDFSOutputSaslHelper.html    | 1320 +++++++++---------
 downloads.html                                     |    2 +-
 export_control.html                                |    2 +-
 index.html                                         |    2 +-
 issue-management.html                              |    2 +-
 licenses.html                                      |    2 +-
 mailing-lists.html                                 |    2 +-
 metrics.html                                       |    2 +-
 old_news.html                                      |    2 +-
 plugin-management.html                             |    2 +-
 plugins.html                                       |    2 +-
 poweredbyhbase.html                                |    2 +-
 project-info.html                                  |    2 +-
 project-reports.html                               |    2 +-
 pseudo-distributed.html                            |    2 +-
 replication.html                                   |    2 +-
 resources.html                                     |    2 +-
 scm.html                                           |    2 +-
 sponsors.html                                      |    2 +-
 summary.html                                       |    2 +-
 supportingprojects.html                            |    2 +-
 team.html                                          |    2 +-
 113 files changed, 9419 insertions(+), 20116 deletions(-)
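
The summary above is git's diffstat for the publish commit. It can be reproduced locally with standard git commands (a minimal sketch, assuming network access to the gitbox repository listed above):

    git clone https://gitbox.apache.org/repos/asf/hbase-site.git
    cd hbase-site
    # Reproduce the file-change summary for this commit
    git show --stat 26d22537706694d021c3aa1b1ced14d453be83f8
    # Show the full diff for a single file, e.g. the footer date bump in acid-semantics.html
    git show 26d22537706694d021c3aa1b1ced14d453be83f8 -- acid-semantics.html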

diff --git a/acid-semantics.html b/acid-semantics.html
index 68decb9..7d41775 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -457,7 +457,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 7103cf1..16daf65 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20190517144748+00'00')
-/CreationDate (D:20190517144748+00'00')
+/ModDate (D:20190518144649+00'00')
+/CreationDate (D:20190518144649+00'00')
 >>
 endobj
 2 0 obj
diff --git a/book.html b/book.html
index 68f9d72..1364a4a 100644
--- a/book.html
+++ b/book.html
@@ -41726,7 +41726,7 @@ org/apache/hadoop/hbase/security/access/AccessControlClient.revoke:(Lorg/apache/
 <div id="footer">
 <div id="footer-text">
 Version 3.0.0-SNAPSHOT<br>
-Last updated 2019-05-17 14:29:39 UTC
+Last updated 2019-05-18 14:29:37 UTC
 </div>
 </div>
 </body>
diff --git a/bulk-loads.html b/bulk-loads.html
index 16bbaec..5ceb521 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -162,7 +162,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 30aabbb..2883951 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -8896,12 +8896,12 @@
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_javadoc.html#JavadocTagContinuationIndentation">JavadocTagContinuationIndentation</a>
 <ul>
 <li>offset: <tt>&quot;2&quot;</tt></li></ul></td>
-<td>680</td>
+<td>681</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="b">
 <td></td>
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription">NonEmptyAtclauseDescription</a></td>
-<td>3251</td>
+<td>3250</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="a">
 <td>misc</td>
@@ -17706,7 +17706,7 @@
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
-<td>NonEmptyAtclauseDescription</td>
+<td>JavadocTagContinuationIndentation</td>
 <td>Javadoc comment at column 0 has parse error. Details: no viable alternative at input '   *' while parsing JAVADOC_TAG</td>
 <td>117</td></tr>
 <tr class="a">
@@ -39872,7 +39872,7 @@
 <td>coding</td>
 <td>InnerAssignment</td>
 <td>Inner assignments should be avoided.</td>
-<td>609</td></tr></table></div>
+<td>336</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.io.compress.Compression.java">org/apache/hadoop/hbase/io/compress/Compression.java</h3>
 <table border="0" class="table table-striped">
@@ -102881,7 +102881,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/coc.html b/coc.html
index fb5ca5a..6768863 100644
--- a/coc.html
+++ b/coc.html
@@ -231,7 +231,7 @@ email to <a class="externalLink" href="mailto:private@hbase.apache.org">the priv
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependencies.html b/dependencies.html
index 72426fd..0c3a86d 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -311,7 +311,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 8083e38..a4d8fdc 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -590,7 +590,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-info.html b/dependency-info.html
index 1eb5652..e5caa32 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -184,7 +184,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-management.html b/dependency-management.html
index c109e02..d42c347 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -894,7 +894,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2019
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-17</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2019-05-18</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/devapidocs/allclasses-frame.html b/devapidocs/allclasses-frame.html
index d0b7927..bd823a9 100644
--- a/devapidocs/allclasses-frame.html
+++ b/devapidocs/allclasses-frame.html
@@ -884,20 +884,14 @@
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html" title="class in org.apache.hadoop.hbase.io.asyncfs" target="classFrame">FanOutOneBlockAsyncDFSOutput.Callback</a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.State.html" title="enum in org.apache.hadoop.hbase.io.asyncfs" target="classFrame">FanOutOneBlockAsyncDFSOutput.State</a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs" target="classFrame">FanOutOneBlockAsyncDFSOutputHelper</a></li>
-<li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html" title="class in org.apache.hadoop.hbase.io.asyncfs" target="classFrame">FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose</a></li>
-<li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.LeaseManager</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html" title="class in org.apache.hadoop.hbase.io.asyncfs" target="classFrame">FanOutOneBlockAsyncDFSOutputHelper.NameNodeException</a></li>
-<li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</span></a></li>
-<li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</span></a></li>
-<li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs" target="classFrame">FanOutOneBlockAsyncDFSOutputSaslHelper</a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs" target="classFrame">FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler</a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs" target="classFrame">FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler</a></li>
-<li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs" target="classFrame">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler</a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs" target="classFrame">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler</a></li>
diff --git a/devapidocs/allclasses-noframe.html b/devapidocs/allclasses-noframe.html
index 3b91f76..4bb9520 100644
--- a/devapidocs/allclasses-noframe.html
+++ b/devapidocs/allclasses-noframe.html
@@ -884,20 +884,14 @@
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.Callback.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput.Callback</a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.State.html" title="enum in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput.State</a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></li>
-<li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose</a></li>
-<li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.LeaseManager</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.NameNodeException</a></li>
-<li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</span></a></li>
-<li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</span></a></li>
-<li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper</a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler</a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler</a></li>
-<li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor</span></a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler</a></li>
 <li><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler</a></li>
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 30a99aa..babe01f 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -3927,7 +3927,7 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#date">date</a></code></td>
-<td class="colLast"><code>"Fri May 17 14:35:11 UTC 2019"</code></td>
+<td class="colLast"><code>"Sat May 18 14:34:54 UTC 2019"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.revision">
@@ -3941,7 +3941,7 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#srcChecksum">srcChecksum</a></code></td>
-<td class="colLast"><code>"652d292d831152f5ab758be46fc6153c"</code></td>
+<td class="colLast"><code>"a44f407917e1e878dfbdf9c53f3a3781"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.url">
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 3374d96..c38e92d 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -1413,8 +1413,6 @@
 <dd>
 <div class="block">Inserts the specified element at the end of this queue.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html#addBlock-org.apache.hadoop.hdfs.protocol.ClientProtocol-java.lang.String-java.lang.String-org.apache.hadoop.hdfs.protocol.ExtendedBlock-org.apache.hadoop.hdfs.protocol.DatanodeInfo:A-long-java.lang.String:A-">addBlock(ClientProtocol, String, String, ExtendedBlock, DatanodeInfo[], long, String[])</a></span> - Method in interface org.apache.hadoop.hbase.io.asyncfs [...]
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html#addBloomFilter-org.apache.hadoop.hbase.util.BloomFilterWriter-org.apache.hadoop.hbase.io.hfile.BlockType-">addBloomFilter(BloomFilterWriter, BlockType)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileWriterImpl</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/LossyCounting.html#addByOne-java.lang.String-">addByOne(String)</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/LossyCounting.html" title="class in org.apache.hadoop.hbase.util">LossyCounting</a></dt>
@@ -6222,8 +6220,6 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/CellArrayMap.html#block">block</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/CellArrayMap.html" title="class in org.apache.hadoop.hbase.regionserver">CellArrayMap</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#BLOCK_ADDER">BLOCK_ADDER</a></span> - Static variable in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html#BLOCK_CACHE_BLOOM_CHUNK_HIT_COUNT">BLOCK_CACHE_BLOOM_CHUNK_HIT_COUNT</a></span> - Static variable in interface org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html" title="interface in org.apache.hadoop.hbase.regionserver">MetricsRegionServerSource</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html#BLOCK_CACHE_BLOOM_CHUNK_MISS_COUNT">BLOCK_CACHE_BLOOM_CHUNK_MISS_COUNT</a></span> - Static variable in interface org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html" title="interface in org.apache.hadoop.hbase.regionserver">MetricsRegionServerSource</a></dt>
@@ -11323,8 +11319,6 @@
 <dd>
 <div class="block">Make sure the directories under rootDir have good permissions.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#CHECKSUM_CREATER">CHECKSUM_CREATER</a></span> - Static variable in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFile.html#CHECKSUM_FAILURES">CHECKSUM_FAILURES</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/MetricsIOSource.html#CHECKSUM_FAILURES_DESC">CHECKSUM_FAILURES_DESC</a></span> - Static variable in interface org.apache.hadoop.hbase.io.<a href="org/apache/hadoop/hbase/io/MetricsIOSource.html" title="interface in org.apache.hadoop.hbase.io">MetricsIOSource</a></dt>
@@ -18027,10 +18021,6 @@
              in org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil.</span></div>
 </div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html#convert-org.apache.hadoop.hdfs.protocol.ExtendedBlock-">convert(ExtendedBlock)</a></span> - Method in interface org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a></dt>
-<dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html#convert-org.apache.hadoop.security.token.Token-">convert(Token&lt;?&gt;)</a></span> - Method in interface org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/Reference.html#convert--">convert()</a></span> - Method in class org.apache.hadoop.hbase.io.<a href="org/apache/hadoop/hbase/io/Reference.html" title="class in org.apache.hadoop.hbase.io">Reference</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/Reference.html#convert-org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference-">convert(FSProtos.Reference)</a></span> - Static method in class org.apache.hadoop.hbase.io.<a href="org/apache/hadoop/hbase/io/Reference.html" title="class in org.apache.hadoop.hbase.io">Reference</a></dt>
@@ -18091,10 +18081,6 @@
 <dd>
 <div class="block">Converts a long expressed in a byte array to an actual long</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html#convertCipherOptionProtos-java.util.List-">convertCipherOptionProtos(List&lt;HdfsProtos.CipherOptionProto&gt;)</a></span> - Method in interface org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputS [...]
-<dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html#convertCipherOptions-java.util.List-">convertCipherOptions(List&lt;CipherOption&gt;)</a></span> - Method in interface org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/ByteBufferUtils.Converter.html#Converter--">Converter()</a></span> - Constructor for class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/ByteBufferUtils.Converter.html" title="class in org.apache.hadoop.hbase.util">ByteBufferUtils.Converter</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/Bytes.Converter.html#Converter--">Converter()</a></span> - Constructor for class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/Bytes.Converter.html" title="class in org.apache.hadoop.hbase.util">Bytes.Converter</a></dt>
@@ -19575,8 +19561,6 @@
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/MultiHConnection.html#createBatchPool-org.apache.hadoop.conf.Configuration-">createBatchPool(Configuration)</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/MultiHConnection.html" title="class in org.apache.hadoop.hbase.util">MultiHConnection</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createBlockAdder--">createBlockAdder()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/BlockCacheFactory.html#createBlockCache-org.apache.hadoop.conf.Configuration-">createBlockCache(Configuration)</a></span> - Static method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/BlockCacheFactory.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheFactory</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/procedure/ProcedurePrepareLatch.html#createBlockingLatch--">createBlockingLatch()</a></span> - Static method in class org.apache.hadoop.hbase.master.procedure.<a href="org/apache/hadoop/hbase/master/procedure/ProcedurePrepareLatch.html" title="class in org.apache.hadoop.hbase.master.procedure">ProcedurePrepareLatch</a></dt>
@@ -19741,16 +19725,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/mapreduce/Import.html#createCfRenameMap-org.apache.hadoop.conf.Configuration-">createCfRenameMap(Configuration)</a></span> - Static method in class org.apache.hadoop.hbase.mapreduce.<a href="org/apache/hadoop/hbase/mapreduce/Import.html" title="class in org.apache.hadoop.hbase.mapreduce">Import</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html#createChecksum-org.apache.hadoop.hdfs.DFSClient-">createChecksum(DFSClient)</a></span> - Method in interface org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createChecksum-org.apache.hadoop.hdfs.DFSClient-">createChecksum(DFSClient)</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createChecksumCreater--">createChecksumCreater()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
-<dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createChecksumCreater27-java.lang.reflect.Method-java.lang.Class-">createChecksumCreater27(Method, Class&lt;?&gt;)</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
-<dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createChecksumCreater28-java.lang.reflect.Method-java.lang.Class-">createChecksumCreater28(Method, Class&lt;?&gt;)</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/ChunkCreator.html#createChunk-boolean-org.apache.hadoop.hbase.regionserver.CompactingMemStore.IndexType-int-">createChunk(boolean, CompactingMemStore.IndexType, int)</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/ChunkCreator.html" title="class in org.apache.hadoop.hbase.regionserver">ChunkCreator</a></dt>
 <dd>
 <div class="block">Creates the chunk either onheap or offheap</div>
@@ -20852,22 +20828,12 @@
 <dd>
 <div class="block">Create an HFileLink relative path for the table/region/family/hfile location</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createPBHelper--">createPBHelper()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
-<dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createPBHelper--">createPBHelper()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/replication/ReplicationPeers.html#createPeer-java.lang.String-">createPeer(String)</a></span> - Method in class org.apache.hadoop.hbase.replication.<a href="org/apache/hadoop/hbase/replication/ReplicationPeers.html" title="class in org.apache.hadoop.hbase.replication">ReplicationPeers</a></dt>
 <dd>
 <div class="block">Helper method to connect to a peer</div>
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/replication/SyncReplicationReplayWALManager.html#createPeerRemoteWALDir-java.lang.String-">createPeerRemoteWALDir(String)</a></span> - Method in class org.apache.hadoop.hbase.master.replication.<a href="org/apache/hadoop/hbase/master/replication/SyncReplicationReplayWALManager.html" title="class in org.apache.hadoop.hbase.master.replication">SyncReplicationReplayWALManager</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createPipelineAckStatusGetter--">createPipelineAckStatusGetter()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
-<dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createPipelineAckStatusGetter26--">createPipelineAckStatusGetter26()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
-<dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createPipelineAckStatusGetter27--">createPipelineAckStatusGetter27()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html#createPlainCompressionStream-java.io.OutputStream-org.apache.hadoop.io.compress.Compressor-">createPlainCompressionStream(OutputStream, Compressor)</a></span> - Method in enum org.apache.hadoop.hbase.io.compress.<a href="org/apache/hadoop/hbase/io/compress/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.compress">Compression.Algorithm</a></dt>
 <dd>
 <div class="block">Creates a compression stream without any additional wrapping into
@@ -21439,8 +21405,6 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/monitoring/TaskMonitor.html#createStatus-java.lang.String-">createStatus(String)</a></span> - Method in class org.apache.hadoop.hbase.monitoring.<a href="org/apache/hadoop/hbase/monitoring/TaskMonitor.html" title="class in org.apache.hadoop.hbase.monitoring">TaskMonitor</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createStorageTypeSetter--">createStorageTypeSetter()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/quotas/RegionSizeStoreFactory.html#createStore--">createStore()</a></span> - Method in class org.apache.hadoop.hbase.quotas.<a href="org/apache/hadoop/hbase/quotas/RegionSizeStoreFactory.html" title="class in org.apache.hadoop.hbase.quotas">RegionSizeStoreFactory</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html#createStoreDir-java.lang.String-">createStoreDir(String)</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegionFileSystem.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionFileSystem</a></dt>
@@ -21721,9 +21685,9 @@
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createTransparentCryptoHelper--">createTransparentCryptoHelper()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createTransparentCryptoHelper27--">createTransparentCryptoHelper27()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createTransparentCryptoHelperWithHDFS12396--">createTransparentCryptoHelperWithHDFS12396()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createTransparentCryptoHelper28--">createTransparentCryptoHelper28()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper</a></dt>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createTransparentCryptoHelperWithoutHDFS12396--">createTransparentCryptoHelperWithoutHDFS12396()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.html#createTreeSet--">createTreeSet()</a></span> - Method in class org.apache.hadoop.hbase.filter.<a href="org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.html" title="class in org.apache.hadoop.hbase.filter">MultipleColumnPrefixFilter</a></dt>
 <dd>&nbsp;</dd>
@@ -31241,12 +31205,8 @@
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#FanOutOneBlockAsyncDFSOutputHelper--">FanOutOneBlockAsyncDFSOutputHelper()</a></span> - Constructor for class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
 <dd>&nbsp;</dd>
-<dt><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</span></a> - Interface in <a href="org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></dt>
-<dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose</span></a> - Class in <a href="org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></dt>
 <dd>&nbsp;</dd>
-<dt><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</span></a> - Interface in <a href="org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></dt>
-<dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</span></a> - Interface in <a href="org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></dt>
 <dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</span></a> - Interface in <a href="org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></dt>
@@ -31257,12 +31217,6 @@
 <dd>
 <div class="block">Exception other than RemoteException thrown when calling create on namenode</div>
 </dd>
-<dt><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</span></a> - Interface in <a href="org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></dt>
-<dd>&nbsp;</dd>
-<dt><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</span></a> - Interface in <a href="org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></dt>
-<dd>&nbsp;</dd>
-<dt><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</span></a> - Interface in <a href="org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></dt>
-<dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputSaslHelper</span></a> - Class in <a href="org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></dt>
 <dd>
 <div class="block">Helper class for adding sasl support for <a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><code>FanOutOneBlockAsyncDFSOutput</code></a>.</div>
@@ -31273,8 +31227,6 @@
 <dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler</span></a> - Class in <a href="org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></dt>
 <dd>&nbsp;</dd>
-<dt><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</span></a> - Interface in <a href="org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></dt>
-<dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor</span></a> - Interface in <a href="org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></dt>
 <dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler</span></a> - Class in <a href="org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></dt>
@@ -34761,8 +34713,6 @@
 <dd>
 <div class="block">Create a new HFileSystem object, similar to FileSystem.get().</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html#get-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto-">get(DataTransferProtos.PipelineAckProto)</a></span> - Method in interface org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncf [...]
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html#get-java.lang.Object-">get(Object)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFile.FileInfo.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile.FileInfo</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/ImmutableBytesWritable.html#get--">get()</a></span> - Method in class org.apache.hadoop.hbase.io.<a href="org/apache/hadoop/hbase/io/ImmutableBytesWritable.html" title="class in org.apache.hadoop.hbase.io">ImmutableBytesWritable</a></dt>
@@ -62633,7 +62583,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RangeIteration.html#initialize-boolean-">initialize(boolean)</a></span> - Method in class org.apache.hadoop.hbase.filter.<a href="org/apache/hadoop/hbase/filter/MultiRowRangeFilter.RangeIteration.html" title="class in org.apache.hadoop.hbase.filter">MultiRowRangeFilter.RangeIteration</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#initialize-org.apache.hadoop.conf.Configuration-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.hdfs.protocol.DatanodeInfo-java.lang.Enum-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-int-org.apache.hadoop.hdfs.DFSClient-org.apache.hadoop.security.token.Token-org.apache.hbase.thirdparty.io.netty.util.concurrent.Pr [...]
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#initialize-org.apache.hadoop.conf.Configuration-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.hdfs.protocol.DatanodeInfo-org.apache.hadoop.fs.StorageType-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-int-org.apache.hadoop.hdfs.DFSClient-org.apache.hadoop.security.token.Token-org.apache.hbase.thirdparty.io.netty. [...]
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexReader.html#initialize-int-">initialize(int)</a></span> - Method in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.BlockIndexReader.html" title="class in org.apache.hadoop.hbase.io.hfile">HFileBlockIndex.BlockIndexReader</a></dt>
 <dd>&nbsp;</dd>
@@ -83923,10 +83873,6 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/metrics/impl/RefCountingMap.Payload.html#Payload-V-">Payload(V)</a></span> - Constructor for class org.apache.hadoop.hbase.metrics.impl.<a href="org/apache/hadoop/hbase/metrics/impl/RefCountingMap.Payload.html" title="class in org.apache.hadoop.hbase.metrics.impl">RefCountingMap.Payload</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#PB_HELPER">PB_HELPER</a></span> - Static variable in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
-<dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#PB_HELPER">PB_HELPER</a></span> - Static variable in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.html#PB_WAL_COMPLETE_MAGIC">PB_WAL_COMPLETE_MAGIC</a></span> - Static variable in class org.apache.hadoop.hbase.regionserver.wal.<a href="org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.html" title="class in org.apache.hadoop.hbase.regionserver.wal">ProtobufLogReader</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.html#PB_WAL_MAGIC">PB_WAL_MAGIC</a></span> - Static variable in class org.apache.hadoop.hbase.regionserver.wal.<a href="org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.html" title="class in org.apache.hadoop.hbase.regionserver.wal">ProtobufLogReader</a></dt>
@@ -84419,8 +84365,6 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/CompactionPipeline.html#pipeline">pipeline</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/CompactionPipeline.html" title="class in org.apache.hadoop.hbase.regionserver">CompactionPipeline</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#PIPELINE_ACK_STATUS_GETTER">PIPELINE_ACK_STATUS_GETTER</a></span> - Static variable in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html#PipelineController--">PipelineController()</a></span> - Constructor for class org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html" title="class in org.apache.hadoop.hbase.wal">WALSplitter.PipelineController</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.html#pipelineThreshold">pipelineThreshold</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreCompactionStrategy</a></dt>
@@ -96885,7 +96829,7 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.html#requestTooBigMessage">requestTooBigMessage</a></span> - Variable in class org.apache.hadoop.hbase.ipc.<a href="org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.html" title="class in org.apache.hadoop.hbase.ipc">NettyRpcFrameDecoder</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#requestWriteBlock-org.apache.hbase.thirdparty.io.netty.channel.Channel-java.lang.Enum-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-">requestWriteBlock(Channel, Enum&lt;?&gt;, DataTransferProtos.OpWriteBlockProto.Builder)</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/F [...]
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#requestWriteBlock-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.fs.StorageType-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-">requestWriteBlock(Channel, StorageType, DataTransferProtos.OpWriteBlockProto.Builder)</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hb [...]
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushRegionEntry.html#requeue-long-">requeue(long)</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/MemStoreFlusher.FlushRegionEntry.html" title="class in org.apache.hadoop.hbase.regionserver">MemStoreFlusher.FlushRegionEntry</a></dt>
 <dd>&nbsp;</dd>
@@ -103556,8 +103500,6 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.html#set-java.lang.String-java.lang.String-java.lang.String-">set(String, String, String)</a></span> - Method in class org.apache.hadoop.hbase.coprocessor.<a href="org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.html" title="class in org.apache.hadoop.hbase.coprocessor">ReadOnlyConfiguration</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html#set-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-java.lang.Enum-">set(DataTransferProtos.OpWriteBlockProto.Builder, Enum&lt;?&gt;)</a></span> - Method in interface org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface  [...]
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/ImmutableBytesWritable.html#set-byte:A-">set(byte[])</a></span> - Method in class org.apache.hadoop.hbase.io.<a href="org/apache/hadoop/hbase/io/ImmutableBytesWritable.html" title="class in org.apache.hadoop.hbase.io">ImmutableBytesWritable</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/ImmutableBytesWritable.html#set-byte:A-int-int-">set(byte[], int, int)</a></span> - Method in class org.apache.hadoop.hbase.io.<a href="org/apache/hadoop/hbase/io/ImmutableBytesWritable.html" title="class in org.apache.hadoop.hbase.io">ImmutableBytesWritable</a></dt>
@@ -113522,8 +113464,6 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html#STORAGE_POLICY_PROPERTY_CF_PREFIX">STORAGE_POLICY_PROPERTY_CF_PREFIX</a></span> - Static variable in class org.apache.hadoop.hbase.mapreduce.<a href="org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html" title="class in org.apache.hadoop.hbase.mapreduce">HFileOutputFormat2</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#STORAGE_TYPE_SETTER">STORAGE_TYPE_SETTER</a></span> - Static variable in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
-<dd>&nbsp;</dd>
 <dt><a href="org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.html" title="class in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">StorageClusterStatusModel</span></a> - Class in <a href="org/apache/hadoop/hbase/rest/model/package-summary.html">org.apache.hadoop.hbase.rest.model</a></dt>
 <dd>
 <div class="block">Representation of the status of a storage cluster:</div>
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 54e5ea1..b55741e 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -167,9 +167,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupRestoreConstants.BackupCommand</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupType</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index b8d09da..ce5da2a 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -549,24 +549,24 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncRequestFutureImpl.Retry</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncProcessTask.SubmittedRows.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncProcessTask.SubmittedRows</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/ScannerCallable.MoreResults.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">ScannerCallable.MoreResults</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncRequestFutureImpl.Retry</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html b/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
index 531254c..127caa7 100644
--- a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
@@ -104,8 +104,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/ExecutorType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">ExecutorType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/EventType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">EventType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/ExecutorType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">ExecutorType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index f32fad9..f18aaa3 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -189,13 +189,13 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/http/package-tree.html b/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
index 8078bf2..611f4f2 100644
--- a/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
@@ -138,9 +138,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Event.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Event</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/HttpConfig.Policy.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">HttpConfig.Policy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Output.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Output</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Event.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Event</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
deleted file mode 100644
index 6a70ef9..0000000
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
+++ /dev/null
@@ -1,243 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!-- NewPage -->
-<html lang="en">
-<head>
-<!-- Generated by javadoc -->
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>FanOutOneBlockAsyncDFSOutputHelper.BlockAdder (Apache HBase 3.0.0-SNAPSHOT API)</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
-<script type="text/javascript" src="../../../../../../script.js"></script>
-</head>
-<body>
-<script type="text/javascript"><!--
-    try {
-        if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="FanOutOneBlockAsyncDFSOutputHelper.BlockAdder (Apache HBase 3.0.0-SNAPSHOT API)";
-        }
-    }
-    catch(err) {
-    }
-//-->
-var methods = {"i0":6};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-</script>
-<noscript>
-<div>JavaScript is disabled on your browser.</div>
-</noscript>
-<!-- ========= START OF TOP NAVBAR ======= -->
-<div class="topNav"><a name="navbar.top">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.top.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="package-summary.html">Package</a></li>
-<li class="navBarCell1Rev">Class</li>
-<li><a href="class-use/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html">Use</a></li>
-<li><a href="package-tree.html">Tree</a></li>
-<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_top">
-<li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_top");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<div>
-<ul class="subNavList">
-<li>Summary:&nbsp;</li>
-<li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.summary">Method</a></li>
-</ul>
-<ul class="subNavList">
-<li>Detail:&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.detail">Method</a></li>
-</ul>
-</div>
-<a name="skip.navbar.top">
-<!--   -->
-</a></div>
-<!-- ========= END OF TOP NAVBAR ========= -->
-<!-- ======== START OF CLASS DATA ======== -->
-<div class="header">
-<div class="subTitle">org.apache.hadoop.hbase.io.asyncfs</div>
-<h2 title="Interface FanOutOneBlockAsyncDFSOutputHelper.BlockAdder" class="title">Interface FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</h2>
-</div>
-<div class="contentContainer">
-<div class="description">
-<ul class="blockList">
-<li class="blockList">
-<dl>
-<dt>Enclosing class:</dt>
-<dd><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dd>
-</dl>
-<hr>
-<br>
-<pre>private static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.159">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</a></pre>
-</li>
-</ul>
-</div>
-<div class="summary">
-<ul class="blockList">
-<li class="blockList">
-<!-- ========== METHOD SUMMARY =========== -->
-<ul class="blockList">
-<li class="blockList"><a name="method.summary">
-<!--   -->
-</a>
-<h3>Method Summary</h3>
-<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
-<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t3" class="tableTab"><span><a href="javascript:show(4);">Abstract Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tr id="i0" class="altColor">
-<td class="colFirst"><code>org.apache.hadoop.hdfs.protocol.LocatedBlock</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html#addBlock-org.apache.hadoop.hdfs.protocol.ClientProtocol-java.lang.String-java.lang.String-org.apache.hadoop.hdfs.protocol.ExtendedBlock-org.apache.hadoop.hdfs.protocol.DatanodeInfo:A-long-java.lang.String:A-">addBlock</a></span>(org.apache.hadoop.hdfs.protocol.ClientProtocol&nbsp;namenode,
-        <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;src,
-        <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;clientName,
-        org.apache.hadoop.hdfs.protocol.ExtendedBlock&nbsp;previous,
-        org.apache.hadoop.hdfs.protocol.DatanodeInfo[]&nbsp;excludeNodes,
-        long&nbsp;fileId,
-        <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;favoredNodes)</code>&nbsp;</td>
-</tr>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-<div class="details">
-<ul class="blockList">
-<li class="blockList">
-<!-- ============ METHOD DETAIL ========== -->
-<ul class="blockList">
-<li class="blockList"><a name="method.detail">
-<!--   -->
-</a>
-<h3>Method Detail</h3>
-<a name="addBlock-org.apache.hadoop.hdfs.protocol.ClientProtocol-java.lang.String-java.lang.String-org.apache.hadoop.hdfs.protocol.ExtendedBlock-org.apache.hadoop.hdfs.protocol.DatanodeInfo:A-long-java.lang.String:A-">
-<!--   -->
-</a>
-<ul class="blockListLast">
-<li class="blockList">
-<h4>addBlock</h4>
-<pre>org.apache.hadoop.hdfs.protocol.LocatedBlock&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html#line.161">addBlock</a>(org.apache.hadoop.hdfs.protocol.ClientProtocol&nbsp;namenode,
-                                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;src,
-                                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;clientName,
-                                                      org.apache.hadoop.hdfs.protocol.ExtendedBlock&nbsp;previous,
-                                                      org.apache.hadoop.hdfs.protocol.DatanodeInfo[]&nbsp;excludeNodes,
-                                                      long&nbsp;fileId,
-                                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;favoredNodes)
-                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
-<dl>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
-</dl>
-</li>
-</ul>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-</div>
-<!-- ========= END OF CLASS DATA ========= -->
-<!-- ======= START OF BOTTOM NAVBAR ====== -->
-<div class="bottomNav"><a name="navbar.bottom">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.bottom.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="package-summary.html">Package</a></li>
-<li class="navBarCell1Rev">Class</li>
-<li><a href="class-use/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html">Use</a></li>
-<li><a href="package-tree.html">Tree</a></li>
-<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_bottom">
-<li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_bottom");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<div>
-<ul class="subNavList">
-<li>Summary:&nbsp;</li>
-<li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.summary">Method</a></li>
-</ul>
-<ul class="subNavList">
-<li>Detail:&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.detail">Method</a></li>
-</ul>
-</div>
-<a name="skip.navbar.bottom">
-<!--   -->
-</a></div>
-<!-- ======== END OF BOTTOM NAVBAR ======= -->
-<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
-</body>
-</html>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
index 88145b2..5e1aa9f 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
@@ -49,8 +49,8 @@ var activeTableTab = "activeTableTab";
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html" target="_top">Frames</a></li>
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.534">FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose</a>
+<pre>static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.258">FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html" title="interface in org.apache.hadoop.hbase.util">CancelableProgressable</a></pre>
 </li>
@@ -208,7 +208,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/util/CancelablePro
 <ul class="blockListLast">
 <li class="blockList">
 <h4>client</h4>
-<pre>private final&nbsp;org.apache.hadoop.hdfs.DFSClient <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html#line.536">client</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.hdfs.DFSClient <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html#line.260">client</a></pre>
 </li>
 </ul>
 </li>
@@ -225,7 +225,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/util/CancelablePro
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CancelOnClose</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html#line.538">CancelOnClose</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client)</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html#line.262">CancelOnClose</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client)</pre>
 </li>
 </ul>
 </li>
@@ -242,7 +242,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/util/CancelablePro
 <ul class="blockListLast">
 <li class="blockList">
 <h4>progress</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html#line.543">progress</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html#line.267">progress</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html#progress--">CancelableProgressable</a></code></span></div>
 <div class="block">Report progress.  Returns true if operations should continue, false if the
  operation should be canceled and rolled back.</div>
@@ -282,8 +282,8 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/util/CancelablePro
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html" target="_top">Frames</a></li>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
deleted file mode 100644
index 57c0c33..0000000
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
+++ /dev/null
@@ -1,226 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!-- NewPage -->
-<html lang="en">
-<head>
-<!-- Generated by javadoc -->
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater (Apache HBase 3.0.0-SNAPSHOT API)</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
-<script type="text/javascript" src="../../../../../../script.js"></script>
-</head>
-<body>
-<script type="text/javascript"><!--
-    try {
-        if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater (Apache HBase 3.0.0-SNAPSHOT API)";
-        }
-    }
-    catch(err) {
-    }
-//-->
-var methods = {"i0":6};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-</script>
-<noscript>
-<div>JavaScript is disabled on your browser.</div>
-</noscript>
-<!-- ========= START OF TOP NAVBAR ======= -->
-<div class="topNav"><a name="navbar.top">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.top.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="package-summary.html">Package</a></li>
-<li class="navBarCell1Rev">Class</li>
-<li><a href="class-use/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html">Use</a></li>
-<li><a href="package-tree.html">Tree</a></li>
-<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_top">
-<li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_top");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<div>
-<ul class="subNavList">
-<li>Summary:&nbsp;</li>
-<li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.summary">Method</a></li>
-</ul>
-<ul class="subNavList">
-<li>Detail:&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.detail">Method</a></li>
-</ul>
-</div>
-<a name="skip.navbar.top">
-<!--   -->
-</a></div>
-<!-- ========= END OF TOP NAVBAR ========= -->
-<!-- ======== START OF CLASS DATA ======== -->
-<div class="header">
-<div class="subTitle">org.apache.hadoop.hbase.io.asyncfs</div>
-<h2 title="Interface FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater" class="title">Interface FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</h2>
-</div>
-<div class="contentContainer">
-<div class="description">
-<ul class="blockList">
-<li class="blockList">
-<dl>
-<dt>Enclosing class:</dt>
-<dd><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dd>
-</dl>
-<hr>
-<br>
-<pre>private static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.197">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></pre>
-</li>
-</ul>
-</div>
-<div class="summary">
-<ul class="blockList">
-<li class="blockList">
-<!-- ========== METHOD SUMMARY =========== -->
-<ul class="blockList">
-<li class="blockList"><a name="method.summary">
-<!--   -->
-</a>
-<h3>Method Summary</h3>
-<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
-<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t3" class="tableTab"><span><a href="javascript:show(4);">Abstract Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tr id="i0" class="altColor">
-<td class="colFirst"><code>org.apache.hadoop.util.DataChecksum</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html#createChecksum-org.apache.hadoop.hdfs.DFSClient-">createChecksum</a></span>(org.apache.hadoop.hdfs.DFSClient&nbsp;client)</code>&nbsp;</td>
-</tr>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-<div class="details">
-<ul class="blockList">
-<li class="blockList">
-<!-- ============ METHOD DETAIL ========== -->
-<ul class="blockList">
-<li class="blockList"><a name="method.detail">
-<!--   -->
-</a>
-<h3>Method Detail</h3>
-<a name="createChecksum-org.apache.hadoop.hdfs.DFSClient-">
-<!--   -->
-</a>
-<ul class="blockListLast">
-<li class="blockList">
-<h4>createChecksum</h4>
-<pre>org.apache.hadoop.util.DataChecksum&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html#line.198">createChecksum</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client)</pre>
-</li>
-</ul>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-</div>
-<!-- ========= END OF CLASS DATA ========= -->
-<!-- ======= START OF BOTTOM NAVBAR ====== -->
-<div class="bottomNav"><a name="navbar.bottom">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.bottom.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="package-summary.html">Package</a></li>
-<li class="navBarCell1Rev">Class</li>
-<li><a href="class-use/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html">Use</a></li>
-<li><a href="package-tree.html">Tree</a></li>
-<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_bottom">
-<li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_bottom");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<div>
-<ul class="subNavList">
-<li>Summary:&nbsp;</li>
-<li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.summary">Method</a></li>
-</ul>
-<ul class="subNavList">
-<li>Detail:&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.detail">Method</a></li>
-</ul>
-</div>
-<a name="skip.navbar.bottom">
-<!--   -->
-</a></div>
-<!-- ======== END OF BOTTOM NAVBAR ======= -->
-<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
-</body>
-</html>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
index 21abc00..7965b8b 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
@@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab";
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.179">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</a></pre>
+<pre>private static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.148">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</a></pre>
 </li>
 </ul>
 </div>
@@ -149,7 +149,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isClientRunning</h4>
-<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html#line.181">isClientRunning</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client)</pre>
+<pre>boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html#line.150">isClientRunning</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client)</pre>
 </li>
 </ul>
 </li>
@@ -180,7 +180,7 @@ var activeTableTab = "activeTableTab";
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html
index 0ada423..4400e13 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.204">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a></pre>
+<pre>private static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.156">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a></pre>
 </li>
 </ul>
 </div>
@@ -169,7 +169,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>create</h4>
-<pre>default&nbsp;org.apache.hadoop.hdfs.protocol.HdfsFileStatus&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html#line.205">create</a>(org.apache.hadoop.hdfs.protocol.ClientProtocol&nbsp;instance,
+<pre>default&nbsp;org.apache.hadoop.hdfs.protocol.HdfsFileStatus&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html#line.157">create</a>(org.apache.hadoop.hdfs.protocol.ClientProtocol&nbsp;instance,
                                                               <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;src,
                                                               org.apache.hadoop.fs.permission.FsPermission&nbsp;masked,
                                                               <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;clientName,
@@ -191,7 +191,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockListLast">
 <li class="blockList">
 <h4>createObject</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html#line.221">createObject</a>(org.apache.hadoop.hdfs.protocol.ClientProtocol&nbsp;instance,
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html#line.173">createObject</a>(org.apache.hadoop.hdfs.protocol.ClientProtocol&nbsp;instance,
                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;src,
                     org.apache.hadoop.fs.permission.FsPermission&nbsp;masked,
                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;clientName,
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
index bdf20d0..7e2f39f 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.168">FanOutOneBlockAsyncDFSOutputHelper.LeaseManager</a></pre>
+<pre>private static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.137">FanOutOneBlockAsyncDFSOutputHelper.LeaseManager</a></pre>
 </li>
 </ul>
 </div>
@@ -155,7 +155,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>begin</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html#line.170">begin</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html#line.139">begin</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
            long&nbsp;inodeId)</pre>
 </li>
 </ul>
@@ -165,7 +165,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockListLast">
 <li class="blockList">
 <h4>end</h4>
-<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html#line.172">end</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
+<pre>void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html#line.141">end</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
          long&nbsp;inodeId)</pre>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
index 26a84d9..240d3d2 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
@@ -44,7 +44,7 @@
 <div class="subNav">
 <ul class="navList">
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html" target="_top">Frames</a></li>
@@ -126,7 +126,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.734">FanOutOneBlockAsyncDFSOutputHelper.NameNodeException</a>
+<pre>public static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.462">FanOutOneBlockAsyncDFSOutputHelper.NameNodeException</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Exception other than RemoteException thrown when calling create on namenode</div>
 <dl>
@@ -215,7 +215,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>serialVersionUID</h4>
-<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html#line.736">serialVersionUID</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html#line.464">serialVersionUID</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.serialVersionUID">Constant Field Values</a></dd>
@@ -236,7 +236,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>NameNodeException</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html#line.738">NameNodeException</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&nbsp;cause)</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html#line.466">NameNodeException</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&nbsp;cause)</pre>
 </li>
 </ul>
 </li>
@@ -268,7 +268,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.h
 <div class="subNav">
 <ul class="navList">
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html" target="_top">Frames</a></li>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html
deleted file mode 100644
index aee64af..0000000
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html
+++ /dev/null
@@ -1,239 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!-- NewPage -->
-<html lang="en">
-<head>
-<!-- Generated by javadoc -->
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>FanOutOneBlockAsyncDFSOutputHelper.PBHelper (Apache HBase 3.0.0-SNAPSHOT API)</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
-<script type="text/javascript" src="../../../../../../script.js"></script>
-</head>
-<body>
-<script type="text/javascript"><!--
-    try {
-        if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="FanOutOneBlockAsyncDFSOutputHelper.PBHelper (Apache HBase 3.0.0-SNAPSHOT API)";
-        }
-    }
-    catch(err) {
-    }
-//-->
-var methods = {"i0":6,"i1":6};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-</script>
-<noscript>
-<div>JavaScript is disabled on your browser.</div>
-</noscript>
-<!-- ========= START OF TOP NAVBAR ======= -->
-<div class="topNav"><a name="navbar.top">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.top.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="package-summary.html">Package</a></li>
-<li class="navBarCell1Rev">Class</li>
-<li><a href="class-use/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html">Use</a></li>
-<li><a href="package-tree.html">Tree</a></li>
-<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_top">
-<li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_top");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<div>
-<ul class="subNavList">
-<li>Summary:&nbsp;</li>
-<li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.summary">Method</a></li>
-</ul>
-<ul class="subNavList">
-<li>Detail:&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.detail">Method</a></li>
-</ul>
-</div>
-<a name="skip.navbar.top">
-<!--   -->
-</a></div>
-<!-- ========= END OF TOP NAVBAR ========= -->
-<!-- ======== START OF CLASS DATA ======== -->
-<div class="header">
-<div class="subTitle">org.apache.hadoop.hbase.io.asyncfs</div>
-<h2 title="Interface FanOutOneBlockAsyncDFSOutputHelper.PBHelper" class="title">Interface FanOutOneBlockAsyncDFSOutputHelper.PBHelper</h2>
-</div>
-<div class="contentContainer">
-<div class="description">
-<ul class="blockList">
-<li class="blockList">
-<dl>
-<dt>Enclosing class:</dt>
-<dd><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dd>
-</dl>
-<hr>
-<br>
-<pre>private static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.187">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a></pre>
-</li>
-</ul>
-</div>
-<div class="summary">
-<ul class="blockList">
-<li class="blockList">
-<!-- ========== METHOD SUMMARY =========== -->
-<ul class="blockList">
-<li class="blockList"><a name="method.summary">
-<!--   -->
-</a>
-<h3>Method Summary</h3>
-<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
-<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t3" class="tableTab"><span><a href="javascript:show(4);">Abstract Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tr id="i0" class="altColor">
-<td class="colFirst"><code>org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html#convert-org.apache.hadoop.hdfs.protocol.ExtendedBlock-">convert</a></span>(org.apache.hadoop.hdfs.protocol.ExtendedBlock&nbsp;b)</code>&nbsp;</td>
-</tr>
-<tr id="i1" class="rowColor">
-<td class="colFirst"><code>org.apache.hadoop.security.proto.SecurityProtos.TokenProto</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html#convert-org.apache.hadoop.security.token.Token-">convert</a></span>(org.apache.hadoop.security.token.Token&lt;?&gt;&nbsp;tok)</code>&nbsp;</td>
-</tr>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-<div class="details">
-<ul class="blockList">
-<li class="blockList">
-<!-- ============ METHOD DETAIL ========== -->
-<ul class="blockList">
-<li class="blockList"><a name="method.detail">
-<!--   -->
-</a>
-<h3>Method Detail</h3>
-<a name="convert-org.apache.hadoop.hdfs.protocol.ExtendedBlock-">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>convert</h4>
-<pre>org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html#line.189">convert</a>(org.apache.hadoop.hdfs.protocol.ExtendedBlock&nbsp;b)</pre>
-</li>
-</ul>
-<a name="convert-org.apache.hadoop.security.token.Token-">
-<!--   -->
-</a>
-<ul class="blockListLast">
-<li class="blockList">
-<h4>convert</h4>
-<pre>org.apache.hadoop.security.proto.SecurityProtos.TokenProto&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html#line.191">convert</a>(org.apache.hadoop.security.token.Token&lt;?&gt;&nbsp;tok)</pre>
-</li>
-</ul>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-</div>
-<!-- ========= END OF CLASS DATA ========= -->
-<!-- ======= START OF BOTTOM NAVBAR ====== -->
-<div class="bottomNav"><a name="navbar.bottom">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.bottom.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="package-summary.html">Package</a></li>
-<li class="navBarCell1Rev">Class</li>
-<li><a href="class-use/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html">Use</a></li>
-<li><a href="package-tree.html">Tree</a></li>
-<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_bottom">
-<li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_bottom");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<div>
-<ul class="subNavList">
-<li>Summary:&nbsp;</li>
-<li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.summary">Method</a></li>
-</ul>
-<ul class="subNavList">
-<li>Detail:&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.detail">Method</a></li>
-</ul>
-</div>
-<a name="skip.navbar.bottom">
-<!--   -->
-</a></div>
-<!-- ======== END OF BOTTOM NAVBAR ======= -->
-<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
-</body>
-</html>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html
deleted file mode 100644
index 243d526..0000000
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html
+++ /dev/null
@@ -1,226 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!-- NewPage -->
-<html lang="en">
-<head>
-<!-- Generated by javadoc -->
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter (Apache HBase 3.0.0-SNAPSHOT API)</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
-<script type="text/javascript" src="../../../../../../script.js"></script>
-</head>
-<body>
-<script type="text/javascript"><!--
-    try {
-        if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter (Apache HBase 3.0.0-SNAPSHOT API)";
-        }
-    }
-    catch(err) {
-    }
-//-->
-var methods = {"i0":6};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-</script>
-<noscript>
-<div>JavaScript is disabled on your browser.</div>
-</noscript>
-<!-- ========= START OF TOP NAVBAR ======= -->
-<div class="topNav"><a name="navbar.top">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.top.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="package-summary.html">Package</a></li>
-<li class="navBarCell1Rev">Class</li>
-<li><a href="class-use/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html">Use</a></li>
-<li><a href="package-tree.html">Tree</a></li>
-<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_top">
-<li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_top");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<div>
-<ul class="subNavList">
-<li>Summary:&nbsp;</li>
-<li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.summary">Method</a></li>
-</ul>
-<ul class="subNavList">
-<li>Detail:&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.detail">Method</a></li>
-</ul>
-</div>
-<a name="skip.navbar.top">
-<!--   -->
-</a></div>
-<!-- ========= END OF TOP NAVBAR ========= -->
-<!-- ======== START OF CLASS DATA ======== -->
-<div class="header">
-<div class="subTitle">org.apache.hadoop.hbase.io.asyncfs</div>
-<h2 title="Interface FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter" class="title">Interface FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</h2>
-</div>
-<div class="contentContainer">
-<div class="description">
-<ul class="blockList">
-<li class="blockList">
-<dl>
-<dt>Enclosing class:</dt>
-<dd><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dd>
-</dl>
-<hr>
-<br>
-<pre>private static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.143">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a></pre>
-</li>
-</ul>
-</div>
-<div class="summary">
-<ul class="blockList">
-<li class="blockList">
-<!-- ========== METHOD SUMMARY =========== -->
-<ul class="blockList">
-<li class="blockList"><a name="method.summary">
-<!--   -->
-</a>
-<h3>Method Summary</h3>
-<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
-<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t3" class="tableTab"><span><a href="javascript:show(4);">Abstract Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tr id="i0" class="altColor">
-<td class="colFirst"><code>org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html#get-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto-">get</a></span>(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto&nbsp;ack)</code>&nbsp;</td>
-</tr>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-<div class="details">
-<ul class="blockList">
-<li class="blockList">
-<!-- ============ METHOD DETAIL ========== -->
-<ul class="blockList">
-<li class="blockList"><a name="method.detail">
-<!--   -->
-</a>
-<h3>Method Detail</h3>
-<a name="get-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto-">
-<!--   -->
-</a>
-<ul class="blockListLast">
-<li class="blockList">
-<h4>get</h4>
-<pre>org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html#line.144">get</a>(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto&nbsp;ack)</pre>
-</li>
-</ul>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-</div>
-<!-- ========= END OF CLASS DATA ========= -->
-<!-- ======= START OF BOTTOM NAVBAR ====== -->
-<div class="bottomNav"><a name="navbar.bottom">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.bottom.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="package-summary.html">Package</a></li>
-<li class="navBarCell1Rev">Class</li>
-<li><a href="class-use/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html">Use</a></li>
-<li><a href="package-tree.html">Tree</a></li>
-<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_bottom">
-<li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_bottom");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<div>
-<ul class="subNavList">
-<li>Summary:&nbsp;</li>
-<li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.summary">Method</a></li>
-</ul>
-<ul class="subNavList">
-<li>Detail:&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.detail">Method</a></li>
-</ul>
-</div>
-<a name="skip.navbar.bottom">
-<!--   -->
-</a></div>
-<!-- ======== END OF BOTTOM NAVBAR ======= -->
-<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
-</body>
-</html>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html
deleted file mode 100644
index 7c6e634..0000000
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html
+++ /dev/null
@@ -1,228 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!-- NewPage -->
-<html lang="en">
-<head>
-<!-- Generated by javadoc -->
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter (Apache HBase 3.0.0-SNAPSHOT API)</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
-<script type="text/javascript" src="../../../../../../script.js"></script>
-</head>
-<body>
-<script type="text/javascript"><!--
-    try {
-        if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter (Apache HBase 3.0.0-SNAPSHOT API)";
-        }
-    }
-    catch(err) {
-    }
-//-->
-var methods = {"i0":6};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-</script>
-<noscript>
-<div>JavaScript is disabled on your browser.</div>
-</noscript>
-<!-- ========= START OF TOP NAVBAR ======= -->
-<div class="topNav"><a name="navbar.top">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.top.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="package-summary.html">Package</a></li>
-<li class="navBarCell1Rev">Class</li>
-<li><a href="class-use/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html">Use</a></li>
-<li><a href="package-tree.html">Tree</a></li>
-<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_top">
-<li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_top");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<div>
-<ul class="subNavList">
-<li>Summary:&nbsp;</li>
-<li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.summary">Method</a></li>
-</ul>
-<ul class="subNavList">
-<li>Detail:&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.detail">Method</a></li>
-</ul>
-</div>
-<a name="skip.navbar.top">
-<!--   -->
-</a></div>
-<!-- ========= END OF TOP NAVBAR ========= -->
-<!-- ======== START OF CLASS DATA ======== -->
-<div class="header">
-<div class="subTitle">org.apache.hadoop.hbase.io.asyncfs</div>
-<h2 title="Interface FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter" class="title">Interface FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</h2>
-</div>
-<div class="contentContainer">
-<div class="description">
-<ul class="blockList">
-<li class="blockList">
-<dl>
-<dt>Enclosing class:</dt>
-<dd><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dd>
-</dl>
-<hr>
-<br>
-<pre>private static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.151">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</a></pre>
-</li>
-</ul>
-</div>
-<div class="summary">
-<ul class="blockList">
-<li class="blockList">
-<!-- ========== METHOD SUMMARY =========== -->
-<ul class="blockList">
-<li class="blockList"><a name="method.summary">
-<!--   -->
-</a>
-<h3>Method Summary</h3>
-<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
-<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t3" class="tableTab"><span><a href="javascript:show(4);">Abstract Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tr id="i0" class="altColor">
-<td class="colFirst"><code>org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html#set-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-java.lang.Enum-">set</a></span>(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder&nbsp;builder,
-   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;?&gt;&nbsp;storageType)</code>&nbsp;</td>
-</tr>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-<div class="details">
-<ul class="blockList">
-<li class="blockList">
-<!-- ============ METHOD DETAIL ========== -->
-<ul class="blockList">
-<li class="blockList"><a name="method.detail">
-<!--   -->
-</a>
-<h3>Method Detail</h3>
-<a name="set-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-java.lang.Enum-">
-<!--   -->
-</a>
-<ul class="blockListLast">
-<li class="blockList">
-<h4>set</h4>
-<pre>org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html#line.152">set</a>(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder&nbsp;builder,
-                                                                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;?&gt;&nbsp;storageType)</pre>
-</li>
-</ul>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-</div>
-<!-- ========= END OF CLASS DATA ========= -->
-<!-- ======= START OF BOTTOM NAVBAR ====== -->
-<div class="bottomNav"><a name="navbar.bottom">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.bottom.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="package-summary.html">Package</a></li>
-<li class="navBarCell1Rev">Class</li>
-<li><a href="class-use/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html">Use</a></li>
-<li><a href="package-tree.html">Tree</a></li>
-<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_bottom">
-<li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_bottom");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<div>
-<ul class="subNavList">
-<li>Summary:&nbsp;</li>
-<li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.summary">Method</a></li>
-</ul>
-<ul class="subNavList">
-<li>Detail:&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.detail">Method</a></li>
-</ul>
-</div>
-<a name="skip.navbar.bottom">
-<!--   -->
-</a></div>
-<!-- ======== END OF BOTTOM NAVBAR ======= -->
-<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
-</body>
-</html>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html
index 1cecca3..1ede7c4 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9};
+var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 <div class="subNav">
 <ul class="navList">
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.State.html" title="enum in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" target="_top">Frames</a></li>
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.118">FanOutOneBlockAsyncDFSOutputHelper</a>
+public final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.116">FanOutOneBlockAsyncDFSOutputHelper</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Helper class for implementing <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><code>FanOutOneBlockAsyncDFSOutput</code></a>.</div>
 </li>
@@ -132,17 +132,9 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <th class="colLast" scope="col">Class and Description</th>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private static interface&nbsp;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</a></span></code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
 <td class="colFirst"><code>(package private) static class&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
-<td class="colFirst"><code>private static interface&nbsp;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></span></code>&nbsp;</td>
-</tr>
 <tr class="rowColor">
 <td class="colFirst"><code>private static interface&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</a></span></code>&nbsp;</td>
@@ -161,18 +153,6 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <div class="block">Exception other than RemoteException thrown when calling create on namenode</div>
 </td>
 </tr>
-<tr class="rowColor">
-<td class="colFirst"><code>private static interface&nbsp;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a></span></code>&nbsp;</td>
-</tr>
-<tr class="altColor">
-<td class="colFirst"><code>private static interface&nbsp;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a></span></code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><code>private static interface&nbsp;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</a></span></code>&nbsp;</td>
-</tr>
 </table>
 </li>
 </ul>
@@ -197,14 +177,6 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES">ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#BLOCK_ADDER">BLOCK_ADDER</a></span></code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#CHECKSUM_CREATER">CHECKSUM_CREATER</a></span></code>&nbsp;</td>
-</tr>
-<tr class="altColor">
 <td class="colFirst"><code>static int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES">DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES</a></span></code>&nbsp;</td>
 </tr>
@@ -233,21 +205,9 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#PB_HELPER">PB_HELPER</a></span></code>&nbsp;</td>
-</tr>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#PIPELINE_ACK_STATUS_GETTER">PIPELINE_ACK_STATUS_GETTER</a></span></code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
 <td class="colFirst"><code>static int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#READ_TIMEOUT">READ_TIMEOUT</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#STORAGE_TYPE_SETTER">STORAGE_TYPE_SETTER</a></span></code>&nbsp;</td>
-</tr>
 </table>
 </li>
 </ul>
@@ -310,48 +270,30 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends org.apache.hbase.thirdparty.io.netty.channel.Channel&gt;&nbsp;channelClass)</code>&nbsp;</td>
 </tr>
 <tr id="i3" class="rowColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createBlockAdder--">createBlockAdder</a></span>()</code>&nbsp;</td>
-</tr>
-<tr id="i4" class="altColor">
 <td class="colFirst"><code>(package private) static org.apache.hadoop.util.DataChecksum</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createChecksum-org.apache.hadoop.hdfs.DFSClient-">createChecksum</a></span>(org.apache.hadoop.hdfs.DFSClient&nbsp;client)</code>&nbsp;</td>
 </tr>
-<tr id="i5" class="rowColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createChecksumCreater--">createChecksumCreater</a></span>()</code>&nbsp;</td>
-</tr>
-<tr id="i6" class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createChecksumCreater27-java.lang.reflect.Method-java.lang.Class-">createChecksumCreater27</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Method.html?is-external=true" title="class or interface in java.lang.reflect">Method</a>&nbsp;getConfMethod,
-                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;?&gt;&nbsp;confClass)</code>&nbsp;</td>
-</tr>
-<tr id="i7" class="rowColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createChecksumCreater28-java.lang.reflect.Method-java.lang.Class-">createChecksumCreater28</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Method.html?is-external=true" title="class or interface in java.lang.reflect">Method</a>&nbsp;getConfMethod,
-                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;?&gt;&nbsp;confClass)</code>&nbsp;</td>
-</tr>
-<tr id="i8" class="altColor">
+<tr id="i4" class="altColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createDFSClientAdaptor--">createDFSClientAdaptor</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i9" class="rowColor">
+<tr id="i5" class="rowColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createFileCreator--">createFileCreator</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i10" class="altColor">
+<tr id="i6" class="altColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createFileCreator2--">createFileCreator2</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i11" class="rowColor">
+<tr id="i7" class="rowColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createFileCreator3--">createFileCreator3</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i12" class="altColor">
+<tr id="i8" class="altColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.LeaseManager</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createLeaseManager--">createLeaseManager</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i13" class="rowColor">
+<tr id="i9" class="rowColor">
 <td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createOutput-org.apache.hadoop.hdfs.DistributedFileSystem-org.apache.hadoop.fs.Path-boolean-boolean-short-long-org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup-java.lang.Class-">createOutput</a></span>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs,
             org.apache.hadoop.fs.Path&nbsp;f,
@@ -364,7 +306,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <div class="block">Create a <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><code>FanOutOneBlockAsyncDFSOutput</code></a>.</div>
 </td>
 </tr>
-<tr id="i14" class="altColor">
+<tr id="i10" class="altColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createOutput-org.apache.hadoop.hdfs.DistributedFileSystem-java.lang.String-boolean-boolean-short-long-org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup-java.lang.Class-">createOutput</a></span>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs,
             <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;src,
@@ -375,65 +317,45 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup&nbsp;eventLoopGroup,
             <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends org.apache.hbase.thirdparty.io.netty.channel.Channel&gt;&nbsp;channelClass)</code>&nbsp;</td>
 </tr>
-<tr id="i15" class="rowColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createPBHelper--">createPBHelper</a></span>()</code>&nbsp;</td>
-</tr>
-<tr id="i16" class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createPipelineAckStatusGetter--">createPipelineAckStatusGetter</a></span>()</code>&nbsp;</td>
-</tr>
-<tr id="i17" class="rowColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createPipelineAckStatusGetter26--">createPipelineAckStatusGetter26</a></span>()</code>&nbsp;</td>
-</tr>
-<tr id="i18" class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createPipelineAckStatusGetter27--">createPipelineAckStatusGetter27</a></span>()</code>&nbsp;</td>
-</tr>
-<tr id="i19" class="rowColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createStorageTypeSetter--">createStorageTypeSetter</a></span>()</code>&nbsp;</td>
-</tr>
-<tr id="i20" class="altColor">
+<tr id="i11" class="rowColor">
 <td class="colFirst"><code>(package private) static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#endFileLease-org.apache.hadoop.hdfs.DFSClient-long-">endFileLease</a></span>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
             long&nbsp;inodeId)</code>&nbsp;</td>
 </tr>
-<tr id="i21" class="rowColor">
+<tr id="i12" class="altColor">
 <td class="colFirst"><code>(package private) static org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#getStatus-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto-">getStatus</a></span>(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto&nbsp;ack)</code>&nbsp;</td>
 </tr>
-<tr id="i22" class="altColor">
+<tr id="i13" class="rowColor">
 <td class="colFirst"><code>private static void</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#initialize-org.apache.hadoop.conf.Configuration-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.hdfs.protocol.DatanodeInfo-java.lang.Enum-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-int-org.apache.hadoop.hdfs.DFSClient-org.apache.hadoop.security.token.Token-org.apache.hbas [...]
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#initialize-org.apache.hadoop.conf.Configuration-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.hdfs.protocol.DatanodeInfo-org.apache.hadoop.fs.StorageType-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-int-org.apache.hadoop.hdfs.DFSClient-org.apache.hadoop.security.token.Tok [...]
           org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
           org.apache.hadoop.hdfs.protocol.DatanodeInfo&nbsp;dnInfo,
-          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;?&gt;&nbsp;storageType,
+          org.apache.hadoop.fs.StorageType&nbsp;storageType,
           org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder&nbsp;writeBlockProtoBuilder,
           int&nbsp;timeoutMs,
           org.apache.hadoop.hdfs.DFSClient&nbsp;client,
           org.apache.hadoop.security.token.Token&lt;org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier&gt;&nbsp;accessToken,
           org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise&lt;org.apache.hbase.thirdparty.io.netty.channel.Channel&gt;&nbsp;promise)</code>&nbsp;</td>
 </tr>
-<tr id="i23" class="rowColor">
+<tr id="i14" class="altColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#processWriteBlockResponse-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.hdfs.protocol.DatanodeInfo-org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise-int-">processWriteBlockResponse</a></span>(org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
                          org.apache.hadoop.hdfs.protocol.DatanodeInfo&nbsp;dnInfo,
                          org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise&lt;org.apache.hbase.thirdparty.io.netty.channel.Channel&gt;&nbsp;promise,
                          int&nbsp;timeoutMs)</code>&nbsp;</td>
 </tr>
-<tr id="i24" class="altColor">
+<tr id="i15" class="rowColor">
 <td class="colFirst"><code>private static void</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#requestWriteBlock-org.apache.hbase.thirdparty.io.netty.channel.Channel-java.lang.Enum-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-">requestWriteBlock</a></span>(org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
-                 <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;?&gt;&nbsp;storageType,
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#requestWriteBlock-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.fs.StorageType-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-">requestWriteBlock</a></span>(org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
+                 org.apache.hadoop.fs.StorageType&nbsp;storageType,
                  org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder&nbsp;writeBlockProtoBuilder)</code>&nbsp;</td>
 </tr>
-<tr id="i25" class="rowColor">
+<tr id="i16" class="altColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#shouldRetryCreate-org.apache.hadoop.ipc.RemoteException-">shouldRetryCreate</a></span>(org.apache.hadoop.ipc.RemoteException&nbsp;e)</code>&nbsp;</td>
 </tr>
-<tr id="i26" class="altColor">
+<tr id="i17" class="rowColor">
 <td class="colFirst"><code>(package private) static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#sleepIgnoreInterrupt-int-">sleepIgnoreInterrupt</a></span>(int&nbsp;retry)</code>&nbsp;</td>
 </tr>
@@ -465,7 +387,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.119">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.117">LOG</a></pre>
 </li>
 </ul>
 <a name="ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES">
@@ -474,7 +396,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.125">ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.123">ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES">Constant Field Values</a></dd>
@@ -487,7 +409,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES</h4>
-<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.127">DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.125">DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES">Constant Field Values</a></dd>
@@ -500,7 +422,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>ALLOC</h4>
-<pre>private static final&nbsp;org.apache.hbase.thirdparty.io.netty.buffer.ByteBufAllocator <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.129">ALLOC</a></pre>
+<pre>private static final&nbsp;org.apache.hbase.thirdparty.io.netty.buffer.ByteBufAllocator <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.127">ALLOC</a></pre>
 </li>
 </ul>
 <a name="HEART_BEAT_SEQNO">
@@ -509,7 +431,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>HEART_BEAT_SEQNO</h4>
-<pre>public static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.132">HEART_BEAT_SEQNO</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.130">HEART_BEAT_SEQNO</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.HEART_BEAT_SEQNO">Constant Field Values</a></dd>
@@ -522,7 +444,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>READ_TIMEOUT</h4>
-<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.135">READ_TIMEOUT</a></pre>
+<pre>public static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.133">READ_TIMEOUT</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.READ_TIMEOUT">Constant Field Values</a></dd>
@@ -535,34 +457,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>EMPTY_DN_ARRAY</h4>
-<pre>private static final&nbsp;org.apache.hadoop.hdfs.protocol.DatanodeInfo[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.137">EMPTY_DN_ARRAY</a></pre>
-</li>
-</ul>
-<a name="PIPELINE_ACK_STATUS_GETTER">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>PIPELINE_ACK_STATUS_GETTER</h4>
-<pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.147">PIPELINE_ACK_STATUS_GETTER</a></pre>
-</li>
-</ul>
-<a name="STORAGE_TYPE_SETTER">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>STORAGE_TYPE_SETTER</h4>
-<pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.155">STORAGE_TYPE_SETTER</a></pre>
-</li>
-</ul>
-<a name="BLOCK_ADDER">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>BLOCK_ADDER</h4>
-<pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.166">BLOCK_ADDER</a></pre>
+<pre>private static final&nbsp;org.apache.hadoop.hdfs.protocol.DatanodeInfo[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.135">EMPTY_DN_ARRAY</a></pre>
 </li>
 </ul>
 <a name="LEASE_MANAGER">
@@ -571,7 +466,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LEASE_MANAGER</h4>
-<pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.LeaseManager</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.175">LEASE_MANAGER</a></pre>
+<pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.LeaseManager</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.144">LEASE_MANAGER</a></pre>
 </li>
 </ul>
 <a name="DFS_CLIENT_ADAPTOR">
@@ -580,25 +475,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DFS_CLIENT_ADAPTOR</h4>
-<pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.184">DFS_CLIENT_ADAPTOR</a></pre>
-</li>
-</ul>
-<a name="PB_HELPER">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>PB_HELPER</h4>
-<pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.194">PB_HELPER</a></pre>
-</li>
-</ul>
-<a name="CHECKSUM_CREATER">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>CHECKSUM_CREATER</h4>
-<pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.201">CHECKSUM_CREATER</a></pre>
+<pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.153">DFS_CLIENT_ADAPTOR</a></pre>
 </li>
 </ul>
 <a name="FILE_CREATOR">
@@ -607,7 +484,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FILE_CREATOR</h4>
-<pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.226">FILE_CREATOR</a></pre>
+<pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.178">FILE_CREATOR</a></pre>
 </li>
 </ul>
 </li>
@@ -624,7 +501,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FanOutOneBlockAsyncDFSOutputHelper</h4>
-<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.122">FanOutOneBlockAsyncDFSOutputHelper</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.120">FanOutOneBlockAsyncDFSOutputHelper</a>()</pre>
 </li>
 </ul>
 </li>
@@ -641,7 +518,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createDFSClientAdaptor</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.228">createDFSClientAdaptor</a>()
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.180">createDFSClientAdaptor</a>()
                                                                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -655,7 +532,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createLeaseManager</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.LeaseManager</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.244">createLeaseManager</a>()
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.LeaseManager</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.196">createLeaseManager</a>()
                                                                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -663,143 +540,13 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </dl>
 </li>
 </ul>
-<a name="createPipelineAckStatusGetter27--">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>createPipelineAckStatusGetter27</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.272">createPipelineAckStatusGetter27</a>()
-                                                                                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
-<dl>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></code></dd>
-</dl>
-</li>
-</ul>
-<a name="createPipelineAckStatusGetter26--">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>createPipelineAckStatusGetter26</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.316">createPipelineAckStatusGetter26</a>()
-                                                                                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
-<dl>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></code></dd>
-</dl>
-</li>
-</ul>
-<a name="createPipelineAckStatusGetter--">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>createPipelineAckStatusGetter</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.332">createPipelineAckStatusGetter</a>()
-                                                                                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
-<dl>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></code></dd>
-</dl>
-</li>
-</ul>
-<a name="createStorageTypeSetter--">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>createStorageTypeSetter</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.344">createStorageTypeSetter</a>()
-                                                                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
-<dl>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></code></dd>
-</dl>
-</li>
-</ul>
-<a name="createBlockAdder--">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>createBlockAdder</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.367">createBlockAdder</a>()
-                                                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
-<dl>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></code></dd>
-</dl>
-</li>
-</ul>
-<a name="createPBHelper--">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>createPBHelper</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.414">createPBHelper</a>()
-                                                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
-<dl>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></code></dd>
-</dl>
-</li>
-</ul>
-<a name="createChecksumCreater28-java.lang.reflect.Method-java.lang.Class-">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>createChecksumCreater28</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.448">createChecksumCreater28</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Method.html?is [...]
-                                                                                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;?&gt;&nbsp;confClass)
-                                                                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
-<dl>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></code></dd>
-</dl>
-</li>
-</ul>
-<a name="createChecksumCreater27-java.lang.reflect.Method-java.lang.Class-">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>createChecksumCreater27</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.470">createChecksumCreater27</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Method.html?is [...]
-                                                                                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;?&gt;&nbsp;confClass)
-                                                                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
-<dl>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></code></dd>
-</dl>
-</li>
-</ul>
-<a name="createChecksumCreater--">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>createChecksumCreater</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.487">createChecksumCreater</a>()
-                                                                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a>,
-                                                                                        <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/ClassNotFoundException.html?is-external=true" title="class or interface in java.lang">ClassNotFoundException</a></pre>
-<dl>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></code></dd>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/ClassNotFoundException.html?is-external=true" title="class or interface in java.lang">ClassNotFoundException</a></code></dd>
-</dl>
-</li>
-</ul>
 <a name="createFileCreator3--">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>createFileCreator3</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.500">createFileCreator3</a>()
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.224">createFileCreator3</a>()
                                                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -813,7 +560,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createFileCreator2</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.512">createFileCreator2</a>()
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.236">createFileCreator2</a>()
                                                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -827,7 +574,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createFileCreator</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.524">createFileCreator</a>()
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.248">createFileCreator</a>()
                                                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -841,7 +588,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>beginFileLease</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.567">beginFileLease</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
+<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.286">beginFileLease</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
                            long&nbsp;inodeId)</pre>
 </li>
 </ul>
@@ -851,7 +598,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>endFileLease</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.571">endFileLease</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
+<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.290">endFileLease</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
                          long&nbsp;inodeId)</pre>
 </li>
 </ul>
@@ -861,7 +608,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createChecksum</h4>
-<pre>static&nbsp;org.apache.hadoop.util.DataChecksum&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.575">createChecksum</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client)</pre>
+<pre>static&nbsp;org.apache.hadoop.util.DataChecksum&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.294">createChecksum</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client)</pre>
 </li>
 </ul>
 <a name="getStatus-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto-">
@@ -870,7 +617,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getStatus</h4>
-<pre>static&nbsp;org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.579">getStatus</a>(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto&nbsp;ack)</pre>
+<pre>static&nbsp;org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.298">getStatus</a>(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto&nbsp;ack)</pre>
 </li>
 </ul>
 <a name="processWriteBlockResponse-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.hdfs.protocol.DatanodeInfo-org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise-int-">
@@ -879,20 +626,20 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>processWriteBlockResponse</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.583">processWriteBlockResponse</a>(org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.310">processWriteBlockResponse</a>(org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
                                               org.apache.hadoop.hdfs.protocol.DatanodeInfo&nbsp;dnInfo,
                                               org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise&lt;org.apache.hbase.thirdparty.io.netty.channel.Channel&gt;&nbsp;promise,
                                               int&nbsp;timeoutMs)</pre>
 </li>
 </ul>
-<a name="requestWriteBlock-org.apache.hbase.thirdparty.io.netty.channel.Channel-java.lang.Enum-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-">
+<a name="requestWriteBlock-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.fs.StorageType-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>requestWriteBlock</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.644">requestWriteBlock</a>(org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
-                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;?&gt;&nbsp;storageType,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.371">requestWriteBlock</a>(org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
+                                      org.apache.hadoop.fs.StorageType&nbsp;storageType,
                                       org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder&nbsp;writeBlockProtoBuilder)
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -901,16 +648,16 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </dl>
 </li>
 </ul>
-<a name="initialize-org.apache.hadoop.conf.Configuration-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.hdfs.protocol.DatanodeInfo-java.lang.Enum-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-int-org.apache.hadoop.hdfs.DFSClient-org.apache.hadoop.security.token.Token-org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise-">
+<a name="initialize-org.apache.hadoop.conf.Configuration-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.hdfs.protocol.DatanodeInfo-org.apache.hadoop.fs.StorageType-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-int-org.apache.hadoop.hdfs.DFSClient-org.apache.hadoop.security.token.Token-org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>initialize</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.656">initialize</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.384">initialize</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
                                org.apache.hadoop.hdfs.protocol.DatanodeInfo&nbsp;dnInfo,
-                               <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang">Enum</a>&lt;?&gt;&nbsp;storageType,
+                               org.apache.hadoop.fs.StorageType&nbsp;storageType,
                                org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder&nbsp;writeBlockProtoBuilder,
                                int&nbsp;timeoutMs,
                                org.apache.hadoop.hdfs.DFSClient&nbsp;client,
@@ -929,7 +676,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>connectToDataNodes</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hbase.thirdparty.io.netty.util.concurrent.Future&lt;org.apache.hbase.thirdparty.io.netty.channel.Channel&gt;&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.677">connectToDataNodes</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hbase.thirdparty.io.netty.util.concurrent.Future&lt;org.apache.hbase.thirdparty.io.netty.channel.Channel&gt;&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.405">connectToDataNodes</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                                                                                                                           org.apache.hadoop.hdfs.DFSClient&nbsp;client,
                                                                                                                                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;clientName,
                                                                                                                                                           org.apache.hadoop.hdfs.protocol.LocatedBlock&nbsp;locatedBlock,
@@ -947,7 +694,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createOutput</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.743">createOutput</a>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs,
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.471">createOutput</a>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs,
                                                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;src,
                                                          boolean&nbsp;overwrite,
                                                          boolean&nbsp;createParent,
@@ -968,7 +715,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createOutput</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.840">createOutput</a>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs,
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.568">createOutput</a>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs,
                                                         org.apache.hadoop.fs.Path&nbsp;f,
                                                         boolean&nbsp;overwrite,
                                                         boolean&nbsp;createParent,
@@ -991,7 +738,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldRetryCreate</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.859">shouldRetryCreate</a>(org.apache.hadoop.ipc.RemoteException&nbsp;e)</pre>
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.587">shouldRetryCreate</a>(org.apache.hadoop.ipc.RemoteException&nbsp;e)</pre>
 </li>
 </ul>
 <a name="completeFile-org.apache.hadoop.hdfs.DFSClient-org.apache.hadoop.hdfs.protocol.ClientProtocol-java.lang.String-java.lang.String-org.apache.hadoop.hdfs.protocol.ExtendedBlock-long-">
@@ -1000,7 +747,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>completeFile</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.866">completeFile</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
+<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.594">completeFile</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
                          org.apache.hadoop.hdfs.protocol.ClientProtocol&nbsp;namenode,
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;src,
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;clientName,
@@ -1014,7 +761,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>sleepIgnoreInterrupt</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.891">sleepIgnoreInterrupt</a>(int&nbsp;retry)</pre>
+<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.619">sleepIgnoreInterrupt</a>(int&nbsp;retry)</pre>
 </li>
 </ul>
 </li>
@@ -1046,7 +793,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <div class="subNav">
 <ul class="navList">
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.State.html" title="enum in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" target="_top">Frames</a></li>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html
index 29336ca..92432a7 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html
@@ -132,7 +132,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.644">FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler</a>
+<pre>private static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.598">FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler</a>
 extends org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler&lt;org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf&gt;</pre>
 </li>
 </ul>
@@ -266,7 +266,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler
 <ul class="blockListLast">
 <li class="blockList">
 <h4>decryptor</h4>
-<pre>private final&nbsp;org.apache.hadoop.crypto.Decryptor <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html#line.646">decryptor</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.crypto.Decryptor <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html#line.600">decryptor</a></pre>
 </li>
 </ul>
 </li>
@@ -283,7 +283,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler
 <ul class="blockListLast">
 <li class="blockList">
 <h4>DecryptHandler</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html#line.648">DecryptHandler</a>(org.apache.hadoop.crypto.CryptoCodec&nbsp;codec,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html#line.602">DecryptHandler</a>(org.apache.hadoop.crypto.CryptoCodec&nbsp;codec,
                       byte[]&nbsp;key,
                       byte[]&nbsp;iv)
                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/security/GeneralSecurityException.html?is-external=true" title="class or interface in java.security">GeneralSecurityException</a>,
@@ -309,7 +309,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler
 <ul class="blockListLast">
 <li class="blockList">
 <h4>channelRead0</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html#line.655">channelRead0</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html#line.609">channelRead0</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
                             org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf&nbsp;msg)
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html
index 300474e..961255b 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 <div class="subNav">
 <ul class="navList">
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html" target="_top">Frames</a></li>
@@ -132,7 +132,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.677">FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler</a>
+<pre>private static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.631">FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler</a>
 extends org.apache.hbase.thirdparty.io.netty.handler.codec.MessageToByteEncoder&lt;org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf&gt;</pre>
 </li>
 </ul>
@@ -273,7 +273,7 @@ extends org.apache.hbase.thirdparty.io.netty.handler.codec.MessageToByteEncoder&
 <ul class="blockListLast">
 <li class="blockList">
 <h4>encryptor</h4>
-<pre>private final&nbsp;org.apache.hadoop.crypto.Encryptor <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html#line.679">encryptor</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.crypto.Encryptor <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html#line.633">encryptor</a></pre>
 </li>
 </ul>
 </li>
@@ -290,7 +290,7 @@ extends org.apache.hbase.thirdparty.io.netty.handler.codec.MessageToByteEncoder&
 <ul class="blockListLast">
 <li class="blockList">
 <h4>EncryptHandler</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html#line.681">EncryptHandler</a>(org.apache.hadoop.crypto.CryptoCodec&nbsp;codec,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html#line.635">EncryptHandler</a>(org.apache.hadoop.crypto.CryptoCodec&nbsp;codec,
                       byte[]&nbsp;key,
                       byte[]&nbsp;iv)
                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/security/GeneralSecurityException.html?is-external=true" title="class or interface in java.security">GeneralSecurityException</a>,
@@ -316,7 +316,7 @@ extends org.apache.hbase.thirdparty.io.netty.handler.codec.MessageToByteEncoder&
 <ul class="blockList">
 <li class="blockList">
 <h4>allocateBuffer</h4>
-<pre>protected&nbsp;org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html#line.688">allocateBuffer</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
+<pre>protected&nbsp;org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html#line.642">allocateBuffer</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
                                                                              org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf&nbsp;msg,
                                                                              boolean&nbsp;preferDirect)
                                                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
@@ -334,7 +334,7 @@ extends org.apache.hbase.thirdparty.io.netty.handler.codec.MessageToByteEncoder&
 <ul class="blockListLast">
 <li class="blockList">
 <h4>encode</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html#line.698">encode</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html#line.652">encode</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
                       org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf&nbsp;msg,
                       org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf&nbsp;out)
                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
@@ -375,7 +375,7 @@ extends org.apache.hbase.thirdparty.io.netty.handler.codec.MessageToByteEncoder&
 <div class="subNav">
 <ul class="navList">
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
 <li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html" target="_top">Frames</a></li>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html
deleted file mode 100644
index 84f252a..0000000
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html
+++ /dev/null
@@ -1,239 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!-- NewPage -->
-<html lang="en">
-<head>
-<!-- Generated by javadoc -->
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper (Apache HBase 3.0.0-SNAPSHOT API)</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
-<script type="text/javascript" src="../../../../../../script.js"></script>
-</head>
-<body>
-<script type="text/javascript"><!--
-    try {
-        if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper (Apache HBase 3.0.0-SNAPSHOT API)";
-        }
-    }
-    catch(err) {
-    }
-//-->
-var methods = {"i0":6,"i1":6};
-var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"]};
-var altColor = "altColor";
-var rowColor = "rowColor";
-var tableTab = "tableTab";
-var activeTableTab = "activeTableTab";
-</script>
-<noscript>
-<div>JavaScript is disabled on your browser.</div>
-</noscript>
-<!-- ========= START OF TOP NAVBAR ======= -->
-<div class="topNav"><a name="navbar.top">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.top.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="package-summary.html">Package</a></li>
-<li class="navBarCell1Rev">Class</li>
-<li><a href="class-use/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html">Use</a></li>
-<li><a href="package-tree.html">Tree</a></li>
-<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_top">
-<li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_top");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<div>
-<ul class="subNavList">
-<li>Summary:&nbsp;</li>
-<li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.summary">Method</a></li>
-</ul>
-<ul class="subNavList">
-<li>Detail:&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.detail">Method</a></li>
-</ul>
-</div>
-<a name="skip.navbar.top">
-<!--   -->
-</a></div>
-<!-- ========= END OF TOP NAVBAR ========= -->
-<!-- ======== START OF CLASS DATA ======== -->
-<div class="header">
-<div class="subTitle">org.apache.hadoop.hbase.io.asyncfs</div>
-<h2 title="Interface FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper" class="title">Interface FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</h2>
-</div>
-<div class="contentContainer">
-<div class="description">
-<ul class="blockList">
-<li class="blockList">
-<dl>
-<dt>Enclosing class:</dt>
-<dd><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper</a></dd>
-</dl>
-<hr>
-<br>
-<pre>private static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.132">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a></pre>
-</li>
-</ul>
-</div>
-<div class="summary">
-<ul class="blockList">
-<li class="blockList">
-<!-- ========== METHOD SUMMARY =========== -->
-<ul class="blockList">
-<li class="blockList"><a name="method.summary">
-<!--   -->
-</a>
-<h3>Method Summary</h3>
-<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
-<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t3" class="tableTab"><span><a href="javascript:show(4);">Abstract Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tr id="i0" class="altColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.crypto.CipherOption&gt;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html#convertCipherOptionProtos-java.util.List-">convertCipherOptionProtos</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CipherOptionProto&gt;&nbsp;options)</code>&nbsp;</td>
-</tr>
-<tr id="i1" class="rowColor">
-<td class="colFirst"><code><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CipherOptionProto&gt;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html#convertCipherOptions-java.util.List-">convertCipherOptions</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.crypto.CipherOption&gt;&nbsp;options)</code>&nbsp;</td>
-</tr>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-<div class="details">
-<ul class="blockList">
-<li class="blockList">
-<!-- ============ METHOD DETAIL ========== -->
-<ul class="blockList">
-<li class="blockList"><a name="method.detail">
-<!--   -->
-</a>
-<h3>Method Detail</h3>
-<a name="convertCipherOptions-java.util.List-">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>convertCipherOptions</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CipherOptionProto&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html#line.134">convertCipherOptions</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interf [...]
-</li>
-</ul>
-<a name="convertCipherOptionProtos-java.util.List-">
-<!--   -->
-</a>
-<ul class="blockListLast">
-<li class="blockList">
-<h4>convertCipherOptionProtos</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.crypto.CipherOption&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html#line.136">convertCipherOptionProtos</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</ [...]
-</li>
-</ul>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-</div>
-<!-- ========= END OF CLASS DATA ========= -->
-<!-- ======= START OF BOTTOM NAVBAR ====== -->
-<div class="bottomNav"><a name="navbar.bottom">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.bottom.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="package-summary.html">Package</a></li>
-<li class="navBarCell1Rev">Class</li>
-<li><a href="class-use/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html">Use</a></li>
-<li><a href="package-tree.html">Tree</a></li>
-<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_bottom">
-<li><a href="../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_bottom");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<div>
-<ul class="subNavList">
-<li>Summary:&nbsp;</li>
-<li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.summary">Method</a></li>
-</ul>
-<ul class="subNavList">
-<li>Detail:&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
-<li>Constr&nbsp;|&nbsp;</li>
-<li><a href="#method.detail">Method</a></li>
-</ul>
-</div>
-<a name="skip.navbar.bottom">
-<!--   -->
-</a></div>
-<!-- ======== END OF BOTTOM NAVBAR ======= -->
-<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
-</body>
-</html>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html
index cd6a74d..1cd7f8b 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html
@@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab";
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
@@ -206,7 +206,7 @@ var activeTableTab = "activeTableTab";
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.EncryptHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html
index dcd463f..e045c89 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.313">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler</a>
+<pre>private static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.267">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/callback/CallbackHandler.html?is-external=true" title="class or interface in javax.security.auth.callback">CallbackHandler</a></pre>
 <div class="block">Sets user name and password when asked by the client-side SASL object.</div>
@@ -214,7 +214,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/aut
 <ul class="blockList">
 <li class="blockList">
 <h4>password</h4>
-<pre>private final&nbsp;char[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html#line.315">password</a></pre>
+<pre>private final&nbsp;char[] <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html#line.269">password</a></pre>
 </li>
 </ul>
 <a name="userName">
@@ -223,7 +223,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/aut
 <ul class="blockListLast">
 <li class="blockList">
 <h4>userName</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html#line.316">userName</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html#line.270">userName</a></pre>
 </li>
 </ul>
 </li>
@@ -240,7 +240,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/aut
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SaslClientCallbackHandler</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html#line.323">SaslClientCallbackHandler</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;userName,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html#line.277">SaslClientCallbackHandler</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;userName,
                                  char[]&nbsp;password)</pre>
 <div class="block">Creates a new SaslClientCallbackHandler.</div>
 <dl>
@@ -263,7 +263,7 @@ implements <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/aut
 <ul class="blockListLast">
 <li class="blockList">
 <h4>handle</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html#line.329">handle</a>(<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/callback/Callback.html?is-external=true" title="class or interface in javax.security.auth.callback">Callback</a>[]&nbsp;callbacks)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html#line.283">handle</a>(<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/callback/Callback.html?is-external=true" title="class or interface in javax.security.auth.callback">Callback</a>[]&nbsp;callbacks)
             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                    <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/auth/callback/UnsupportedCallbackException.html?is-external=true" title="class or interface in javax.security.auth.callback">UnsupportedCallbackException</a></pre>
 <dl>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html
index f0a30f0..ba8eb4e 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html
@@ -132,7 +132,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.358">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler</a>
+<pre>private static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.312">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler</a>
 extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 </li>
 </ul>
@@ -362,7 +362,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>private final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.360">conf</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.314">conf</a></pre>
 </li>
 </ul>
 <a name="saslProps">
@@ -371,7 +371,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>saslProps</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../../src-html/org/a [...]
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../../src-html/org/a [...]
 </li>
 </ul>
 <a name="saslClient">
@@ -380,7 +380,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>saslClient</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true" title="class or interface in javax.security.sasl">SaslClient</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.364">saslClient</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true" title="class or interface in javax.security.sasl">SaslClient</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.318">saslClient</a></pre>
 </li>
 </ul>
 <a name="timeoutMs">
@@ -389,7 +389,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>timeoutMs</h4>
-<pre>private final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.366">timeoutMs</a></pre>
+<pre>private final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.320">timeoutMs</a></pre>
 </li>
 </ul>
 <a name="promise">
@@ -398,7 +398,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>promise</h4>
-<pre>private final&nbsp;org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.368">promise</a></pre>
+<pre>private final&nbsp;org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.322">promise</a></pre>
 </li>
 </ul>
 <a name="dfsClient">
@@ -407,7 +407,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>dfsClient</h4>
-<pre>private final&nbsp;org.apache.hadoop.hdfs.DFSClient <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.370">dfsClient</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.hdfs.DFSClient <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.324">dfsClient</a></pre>
 </li>
 </ul>
 <a name="step">
@@ -416,7 +416,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>step</h4>
-<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.372">step</a></pre>
+<pre>private&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.326">step</a></pre>
 </li>
 </ul>
 </li>
@@ -433,7 +433,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SaslNegotiateHandler</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.374">SaslNegotiateHandler</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.328">SaslNegotiateHandler</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                             <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;username,
                             char[]&nbsp;password,
                             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;saslProps,
@@ -461,7 +461,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>sendSaslMessage</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.386">sendSaslMessage</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.340">sendSaslMessage</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
                              byte[]&nbsp;payload)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -476,7 +476,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>getCipherOptions</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.crypto.CipherOption&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.390">getCipherOptions</a>()
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.crypto.CipherOption&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.344">getCipherOptions</a>()
                                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -490,7 +490,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>sendSaslMessage</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.405">sendSaslMessage</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.359">sendSaslMessage</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
                              byte[]&nbsp;payload,
                              <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.crypto.CipherOption&gt;&nbsp;options)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -506,7 +506,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>handlerAdded</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.427">handlerAdded</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.381">handlerAdded</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -524,7 +524,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>channelInactive</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.435">channelInactive</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.389">channelInactive</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx)
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -542,7 +542,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>check</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.439">check</a>(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.DataTransferEncryptorMessageProto&nbsp;proto)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.393">check</a>(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.DataTransferEncryptorMessageProto&nbsp;proto)
             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -556,7 +556,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>getNegotiatedQop</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.448">getNegotiatedQop</a>()</pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.402">getNegotiatedQop</a>()</pre>
 </li>
 </ul>
 <a name="isNegotiatedQopPrivacy--">
@@ -565,7 +565,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>isNegotiatedQopPrivacy</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.452">isNegotiatedQopPrivacy</a>()</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.406">isNegotiatedQopPrivacy</a>()</pre>
 </li>
 </ul>
 <a name="requestedQopContainsPrivacy--">
@@ -574,7 +574,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>requestedQopContainsPrivacy</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.457">requestedQopContainsPrivacy</a>()</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.411">requestedQopContainsPrivacy</a>()</pre>
 </li>
 </ul>
 <a name="checkSaslComplete--">
@@ -583,7 +583,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>checkSaslComplete</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.463">checkSaslComplete</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.417">checkSaslComplete</a>()
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -597,7 +597,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>useWrap</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.480">useWrap</a>()</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.434">useWrap</a>()</pre>
 </li>
 </ul>
 <a name="unwrap-org.apache.hadoop.crypto.CipherOption-javax.security.sasl.SaslClient-">
@@ -606,7 +606,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>unwrap</h4>
-<pre>private&nbsp;org.apache.hadoop.crypto.CipherOption&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.485">unwrap</a>(org.apache.hadoop.crypto.CipherOption&nbsp;option,
+<pre>private&nbsp;org.apache.hadoop.crypto.CipherOption&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.439">unwrap</a>(org.apache.hadoop.crypto.CipherOption&nbsp;option,
                                                      <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true" title="class or interface in javax.security.sasl">SaslClient</a>&nbsp;saslClient)
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -621,7 +621,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>getCipherOption</h4>
-<pre>private&nbsp;org.apache.hadoop.crypto.CipherOption&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.498">getCipherOption</a>(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.DataTransferEncryptorMessageProto&nbsp;proto,
+<pre>private&nbsp;org.apache.hadoop.crypto.CipherOption&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.452">getCipherOption</a>(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.DataTransferEncryptorMessageProto&nbsp;proto,
                                                               boolean&nbsp;isNegotiatedQopPrivacy,
                                                               <a href="https://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true" title="class or interface in javax.security.sasl">SaslClient</a>&nbsp;saslClient)
                                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -637,7 +637,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>channelRead</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.510">channelRead</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.464">channelRead</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;msg)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
@@ -656,7 +656,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockList">
 <li class="blockList">
 <h4>exceptionCaught</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.559">exceptionCaught</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.513">exceptionCaught</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
                             <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&nbsp;cause)
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
@@ -677,7 +677,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler</pre>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>userEventTriggered</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.564">userEventTriggered</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html#line.518">userEventTriggered</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;evt)
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html
index 846e45b..f5014d1 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html
@@ -132,7 +132,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.573">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler</a>
+<pre>private static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.527">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler</a>
 extends org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler&lt;org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf&gt;</pre>
 </li>
 </ul>
@@ -268,7 +268,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler
 <ul class="blockListLast">
 <li class="blockList">
 <h4>saslClient</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true" title="class or interface in javax.security.sasl">SaslClient</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html#line.575">saslClient</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true" title="class or interface in javax.security.sasl">SaslClient</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html#line.529">saslClient</a></pre>
 </li>
 </ul>
 </li>
@@ -285,7 +285,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SaslUnwrapHandler</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html#line.577">SaslUnwrapHandler</a>(<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true" title="class or interface in javax.security.sasl">SaslClient</a>&nbsp;saslClient)</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html#line.531">SaslUnwrapHandler</a>(<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true" title="class or interface in javax.security.sasl">SaslClient</a>&nbsp;saslClient)</pre>
 </li>
 </ul>
 </li>
@@ -302,7 +302,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler
 <ul class="blockList">
 <li class="blockList">
 <h4>channelInactive</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html#line.582">channelInactive</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html#line.536">channelInactive</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx)
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -320,7 +320,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler
 <ul class="blockListLast">
 <li class="blockList">
 <h4>channelRead0</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html#line.587">channelRead0</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html#line.541">channelRead0</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
                             org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf&nbsp;msg)
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html
index b7ef788..034357c 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.595">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler</a>
+<pre>private static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.549">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler</a>
 extends org.apache.hbase.thirdparty.io.netty.channel.ChannelOutboundHandlerAdapter</pre>
 </li>
 </ul>
@@ -270,7 +270,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelOutboundHandlerAdapt
 <ul class="blockList">
 <li class="blockList">
 <h4>saslClient</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true" title="class or interface in javax.security.sasl">SaslClient</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html#line.597">saslClient</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true" title="class or interface in javax.security.sasl">SaslClient</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html#line.551">saslClient</a></pre>
 </li>
 </ul>
 <a name="cBuf">
@@ -279,7 +279,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelOutboundHandlerAdapt
 <ul class="blockListLast">
 <li class="blockList">
 <h4>cBuf</h4>
-<pre>private&nbsp;org.apache.hbase.thirdparty.io.netty.buffer.CompositeByteBuf <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html#line.599">cBuf</a></pre>
+<pre>private&nbsp;org.apache.hbase.thirdparty.io.netty.buffer.CompositeByteBuf <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html#line.553">cBuf</a></pre>
 </li>
 </ul>
 </li>
@@ -296,7 +296,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelOutboundHandlerAdapt
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SaslWrapHandler</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html#line.601">SaslWrapHandler</a>(<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true" title="class or interface in javax.security.sasl">SaslClient</a>&nbsp;saslClient)</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html#line.555">SaslWrapHandler</a>(<a href="https://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslClient.html?is-external=true" title="class or interface in javax.security.sasl">SaslClient</a>&nbsp;saslClient)</pre>
 </li>
 </ul>
 </li>
@@ -313,7 +313,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelOutboundHandlerAdapt
 <ul class="blockList">
 <li class="blockList">
 <h4>handlerAdded</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html#line.606">handlerAdded</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html#line.560">handlerAdded</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -331,7 +331,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelOutboundHandlerAdapt
 <ul class="blockList">
 <li class="blockList">
 <h4>write</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html#line.611">write</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html#line.565">write</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;msg,
                   org.apache.hbase.thirdparty.io.netty.channel.ChannelPromise&nbsp;promise)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
@@ -351,7 +351,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelOutboundHandlerAdapt
 <ul class="blockList">
 <li class="blockList">
 <h4>flush</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html#line.623">flush</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html#line.577">flush</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -369,7 +369,7 @@ extends org.apache.hbase.thirdparty.io.netty.channel.ChannelOutboundHandlerAdapt
 <ul class="blockListLast">
 <li class="blockList">
 <h4>close</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html#line.638">close</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html#line.592">close</a>(org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext&nbsp;ctx,
                   org.apache.hbase.thirdparty.io.netty.channel.ChannelPromise&nbsp;promise)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html
index b344034..86baf45 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.141">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a></pre>
+<pre>private static interface <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.131">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a></pre>
 </li>
 </ul>
 </div>
@@ -151,7 +151,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockListLast">
 <li class="blockList">
 <h4>createEncryptor</h4>
-<pre>org.apache.hadoop.crypto.Encryptor&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html#line.143">createEncryptor</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>org.apache.hadoop.crypto.Encryptor&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html#line.133">createEncryptor</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                    org.apache.hadoop.fs.FileEncryptionInfo&nbsp;feInfo,
                                                    org.apache.hadoop.hdfs.DFSClient&nbsp;client)
                                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html
index e162527..cc21b0e 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9};
+var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -49,7 +49,7 @@ var activeTableTab = "activeTableTab";
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
@@ -141,31 +141,27 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>private static interface&nbsp;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a></span></code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><code>private static interface&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static class&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslClientCallbackHandler</a></span></code>
 <div class="block">Sets user name and password when asked by the client-side SASL object.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static class&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static class&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslUnwrapHandler</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static class&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslWrapHandler</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static interface&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a></span></code>&nbsp;</td>
 </tr>
@@ -197,26 +193,22 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#NAME_DELIMITER">NAME_DELIMITER</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#PB_HELPER">PB_HELPER</a></span></code>&nbsp;</td>
-</tr>
-<tr class="altColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#PROTOCOL">PROTOCOL</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#SASL_ADAPTOR">SASL_ADAPTOR</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#SASL_TRANSFER_MAGIC_NUMBER">SASL_TRANSFER_MAGIC_NUMBER</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#SERVER_NAME">SERVER_NAME</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#TRANSPARENT_CRYPTO_HELPER">TRANSPARENT_CRYPTO_HELPER</a></span></code>&nbsp;</td>
 </tr>
@@ -269,30 +261,26 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
                org.apache.hadoop.hdfs.DFSClient&nbsp;client)</code>&nbsp;</td>
 </tr>
 <tr id="i3" class="rowColor">
-<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createPBHelper--">createPBHelper</a></span>()</code>&nbsp;</td>
-</tr>
-<tr id="i4" class="altColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createSaslAdaptor--">createSaslAdaptor</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i5" class="rowColor">
+<tr id="i4" class="altColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createSaslPropertiesForEncryption-java.lang.String-">createSaslPropertiesForEncryption</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;encryptionAlgorithm)</code>&nbsp;</td>
 </tr>
-<tr id="i6" class="altColor">
+<tr id="i5" class="rowColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createTransparentCryptoHelper--">createTransparentCryptoHelper</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i7" class="rowColor">
+<tr id="i6" class="altColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createTransparentCryptoHelper27--">createTransparentCryptoHelper27</a></span>()</code>&nbsp;</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createTransparentCryptoHelperWithHDFS12396--">createTransparentCryptoHelperWithHDFS12396</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i8" class="altColor">
+<tr id="i7" class="rowColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createTransparentCryptoHelper28--">createTransparentCryptoHelper28</a></span>()</code>&nbsp;</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createTransparentCryptoHelperWithoutHDFS12396--">createTransparentCryptoHelperWithoutHDFS12396</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i9" class="rowColor">
+<tr id="i8" class="altColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#doSaslNegotiation-org.apache.hadoop.conf.Configuration-org.apache.hbase.thirdparty.io.netty.channel.Channel-int-java.lang.String-char:A-java.util.Map-org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise-org.apache.hadoop.hdfs.DFSClient-">doSaslNegotiation</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                  org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
@@ -303,15 +291,15 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
                  org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true" title="class or interface in java.lang">Void</a>&gt;&nbsp;saslPromise,
                  org.apache.hadoop.hdfs.DFSClient&nbsp;dfsClient)</code>&nbsp;</td>
 </tr>
-<tr id="i10" class="altColor">
+<tr id="i9" class="rowColor">
 <td class="colFirst"><code>private static char[]</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#encryptionKeyToPassword-byte:A-">encryptionKeyToPassword</a></span>(byte[]&nbsp;encryptionKey)</code>&nbsp;</td>
 </tr>
-<tr id="i11" class="rowColor">
+<tr id="i10" class="altColor">
 <td class="colFirst"><code>private static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#getUserNameFromEncryptionKey-org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey-">getUserNameFromEncryptionKey</a></span>(org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey&nbsp;encryptionKey)</code>&nbsp;</td>
 </tr>
-<tr id="i12" class="altColor">
+<tr id="i11" class="rowColor">
 <td class="colFirst"><code>(package private) static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#trySaslNegotiate-org.apache.hadoop.conf.Configuration-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.hdfs.protocol.DatanodeInfo-int-org.apache.hadoop.hdfs.DFSClient-org.apache.hadoop.security.token.Token-org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise-">trySaslNegotiate</a></span>(org.apache.h [...]
                 org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
@@ -426,22 +414,13 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.129">SASL_ADAPTOR</a></pre>
 </li>
 </ul>
-<a name="PB_HELPER">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>PB_HELPER</h4>
-<pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.139">PB_HELPER</a></pre>
-</li>
-</ul>
 <a name="TRANSPARENT_CRYPTO_HELPER">
 <!--   -->
 </a>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TRANSPARENT_CRYPTO_HELPER</h4>
-<pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.147">TRANSPARENT_CRYPTO_HELPER</a></pre>
+<pre>private static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.137">TRANSPARENT_CRYPTO_HELPER</a></pre>
 </li>
 </ul>
 </li>
@@ -475,7 +454,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createSaslAdaptor</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.149">createSaslAdaptor</a>()
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.139">createSaslAdaptor</a>()
                                                                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchFieldException.html?is-external=true" title="class or interface in java.lang">NoSuchFieldException</a>,
                                                                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
 <dl>
@@ -485,43 +464,29 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </dl>
 </li>
 </ul>
-<a name="createPBHelper--">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>createPBHelper</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.191">createPBHelper</a>()
-                                                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
-<dl>
-<dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></code></dd>
-</dl>
-</li>
-</ul>
-<a name="createTransparentCryptoHelper27--">
+<a name="createTransparentCryptoHelperWithoutHDFS12396--">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>createTransparentCryptoHelper27</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.226">createTransparentCryptoHelper27</a>()
-                                                                                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
+<h4>createTransparentCryptoHelperWithoutHDFS12396</h4>
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.181">createTransparentCryptoHelperWithoutHDFS12396</a>()
+                                                                                                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></code></dd>
 </dl>
 </li>
 </ul>
-<a name="createTransparentCryptoHelper28--">
+<a name="createTransparentCryptoHelperWithHDFS12396--">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
-<h4>createTransparentCryptoHelper28</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.255">createTransparentCryptoHelper28</a>()
-                                                                                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/ClassNotFoundException.html?is-external=true" title="class or interface in java.lang">ClassNotFoundException</a>,
-                                                                                                              <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
+<h4>createTransparentCryptoHelperWithHDFS12396</h4>
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.210">createTransparentCryptoHelperWithHDFS12396</a>()
+                                                                                                                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/ClassNotFoundException.html?is-external=true" title="class or interface in java.lang">ClassNotFoundException</a>,
+                                                                                                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/ClassNotFoundException.html?is-external=true" title="class or interface in java.lang">ClassNotFoundException</a></code></dd>
@@ -535,7 +500,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createTransparentCryptoHelper</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.285">createTransparentCryptoHelper</a>()
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.240">createTransparentCryptoHelper</a>()
                                                                                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a>,
                                                                                                             <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/ClassNotFoundException.html?is-external=true" title="class or interface in java.lang">ClassNotFoundException</a></pre>
 <dl>
@@ -551,7 +516,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getUserNameFromEncryptionKey</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.718">getUserNameFromEncryptionKey</a>(org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey&nbsp;encryptionKey)</pre>
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.672">getUserNameFromEncryptionKey</a>(org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey&nbsp;encryptionKey)</pre>
 </li>
 </ul>
 <a name="encryptionKeyToPassword-byte:A-">
@@ -560,7 +525,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>encryptionKeyToPassword</h4>
-<pre>private static&nbsp;char[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.723">encryptionKeyToPassword</a>(byte[]&nbsp;encryptionKey)</pre>
+<pre>private static&nbsp;char[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.677">encryptionKeyToPassword</a>(byte[]&nbsp;encryptionKey)</pre>
 </li>
 </ul>
 <a name="buildUsername-org.apache.hadoop.security.token.Token-">
@@ -569,7 +534,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>buildUsername</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.727">buildUsername</a>(org.apache.hadoop.security.token.Token&lt;org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier&gt;&nbsp;blockToken)</pre>
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.681">buildUsername</a>(org.apache.hadoop.security.token.Token&lt;org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier&gt;&nbsp;blockToken)</pre>
 </li>
 </ul>
 <a name="buildClientPassword-org.apache.hadoop.security.token.Token-">
@@ -578,7 +543,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>buildClientPassword</h4>
-<pre>private static&nbsp;char[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.731">buildClientPassword</a>(org.apache.hadoop.security.token.Token&lt;org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier&gt;&nbsp;blockToken)</pre>
+<pre>private static&nbsp;char[]&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.685">buildClientPassword</a>(org.apache.hadoop.security.token.Token&lt;org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier&gt;&nbsp;blockToken)</pre>
 </li>
 </ul>
 <a name="createSaslPropertiesForEncryption-java.lang.String-">
@@ -587,7 +552,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createSaslPropertiesForEncryption</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../../src-html [...]
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../../src-html [...]
 </li>
 </ul>
 <a name="doSaslNegotiation-org.apache.hadoop.conf.Configuration-org.apache.hbase.thirdparty.io.netty.channel.Channel-int-java.lang.String-char:A-java.util.Map-org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise-org.apache.hadoop.hdfs.DFSClient-">
@@ -596,7 +561,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>doSaslNegotiation</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.744">doSaslNegotiation</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.698">doSaslNegotiation</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                       org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
                                       int&nbsp;timeoutMs,
                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;username,
@@ -612,7 +577,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>trySaslNegotiate</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.758">trySaslNegotiate</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.712">trySaslNegotiate</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                              org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
                              org.apache.hadoop.hdfs.protocol.DatanodeInfo&nbsp;dnInfo,
                              int&nbsp;timeoutMs,
@@ -632,7 +597,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>createEncryptor</h4>
-<pre>static&nbsp;org.apache.hadoop.crypto.Encryptor&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.819">createEncryptor</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>static&nbsp;org.apache.hadoop.crypto.Encryptor&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#line.773">createEncryptor</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                           org.apache.hadoop.hdfs.protocol.HdfsFileStatus&nbsp;stat,
                                                           org.apache.hadoop.hdfs.DFSClient&nbsp;client)
                                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -670,7 +635,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </div>
 <div class="subNav">
 <ul class="navList">
-<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
+<li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Prev&nbsp;Class</span></a></li>
 <li><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.DecryptHandler.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">Next&nbsp;Class</span></a></li>
 </ul>
 <ul class="navList">
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
deleted file mode 100644
index be99d8c..0000000
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
+++ /dev/null
@@ -1,178 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!-- NewPage -->
-<html lang="en">
-<head>
-<!-- Generated by javadoc -->
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.BlockAdder (Apache HBase 3.0.0-SNAPSHOT API)</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
-<script type="text/javascript" src="../../../../../../../script.js"></script>
-</head>
-<body>
-<script type="text/javascript"><!--
-    try {
-        if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.BlockAdder (Apache HBase 3.0.0-SNAPSHOT API)";
-        }
-    }
-    catch(err) {
-    }
-//-->
-</script>
-<noscript>
-<div>JavaScript is disabled on your browser.</div>
-</noscript>
-<!-- ========= START OF TOP NAVBAR ======= -->
-<div class="topNav"><a name="navbar.top">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.top.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">Class</a></li>
-<li class="navBarCell1Rev">Use</li>
-<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
-<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li>Prev</li>
-<li>Next</li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_top">
-<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_top");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<a name="skip.navbar.top">
-<!--   -->
-</a></div>
-<!-- ========= END OF TOP NAVBAR ========= -->
-<div class="header">
-<h2 title="Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.BlockAdder" class="title">Uses of Interface<br>org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</h2>
-</div>
-<div class="classUseContainer">
-<ul class="blockList">
-<li class="blockList">
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
-<caption><span>Packages that use <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Package</th>
-<th class="colLast" scope="col">Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><a href="#org.apache.hadoop.hbase.io.asyncfs">org.apache.hadoop.hbase.io.asyncfs</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-<li class="blockList">
-<ul class="blockList">
-<li class="blockList"><a name="org.apache.hadoop.hbase.io.asyncfs">
-<!--   -->
-</a>
-<h3>Uses of <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</a> in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></h3>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
-<caption><span>Fields in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a> declared as <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Field and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#BLOCK_ADDER">BLOCK_ADDER</a></span></code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
-<caption><span>Methods in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a> that return <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createBlockAdder--">createBlockAdder</a></span>()</code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-<!-- ======= START OF BOTTOM NAVBAR ====== -->
-<div class="bottomNav"><a name="navbar.bottom">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.bottom.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">Class</a></li>
-<li class="navBarCell1Rev">Use</li>
-<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
-<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li>Prev</li>
-<li>Next</li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_bottom">
-<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_bottom");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<a name="skip.navbar.bottom">
-<!--   -->
-</a></div>
-<!-- ======== END OF BOTTOM NAVBAR ======= -->
-<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
-</body>
-</html>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
deleted file mode 100644
index 0c7fbf0..0000000
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
+++ /dev/null
@@ -1,188 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!-- NewPage -->
-<html lang="en">
-<head>
-<!-- Generated by javadoc -->
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater (Apache HBase 3.0.0-SNAPSHOT API)</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
-<script type="text/javascript" src="../../../../../../../script.js"></script>
-</head>
-<body>
-<script type="text/javascript"><!--
-    try {
-        if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater (Apache HBase 3.0.0-SNAPSHOT API)";
-        }
-    }
-    catch(err) {
-    }
-//-->
-</script>
-<noscript>
-<div>JavaScript is disabled on your browser.</div>
-</noscript>
-<!-- ========= START OF TOP NAVBAR ======= -->
-<div class="topNav"><a name="navbar.top">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.top.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">Class</a></li>
-<li class="navBarCell1Rev">Use</li>
-<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
-<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li>Prev</li>
-<li>Next</li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_top">
-<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_top");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<a name="skip.navbar.top">
-<!--   -->
-</a></div>
-<!-- ========= END OF TOP NAVBAR ========= -->
-<div class="header">
-<h2 title="Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater" class="title">Uses of Interface<br>org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</h2>
-</div>
-<div class="classUseContainer">
-<ul class="blockList">
-<li class="blockList">
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
-<caption><span>Packages that use <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Package</th>
-<th class="colLast" scope="col">Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><a href="#org.apache.hadoop.hbase.io.asyncfs">org.apache.hadoop.hbase.io.asyncfs</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-<li class="blockList">
-<ul class="blockList">
-<li class="blockList"><a name="org.apache.hadoop.hbase.io.asyncfs">
-<!--   -->
-</a>
-<h3>Uses of <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a> in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></h3>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
-<caption><span>Fields in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a> declared as <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Field and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#CHECKSUM_CREATER">CHECKSUM_CREATER</a></span></code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
-<caption><span>Methods in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a> that return <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createChecksumCreater--">createChecksumCreater</a></span>()</code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createChecksumCreater27-java.lang.reflect.Method-java.lang.Class-">createChecksumCreater27</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Method.html?is-external=true" title="class or interface in java.lang.reflect">Method</a>&nb [...]
-                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;?&gt;&nbsp;confClass)</code>&nbsp;</td>
-</tr>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createChecksumCreater28-java.lang.reflect.Method-java.lang.Class-">createChecksumCreater28</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/reflect/Method.html?is-external=true" title="class or interface in java.lang.reflect">Method</a>&nb [...]
-                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;?&gt;&nbsp;confClass)</code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-<!-- ======= START OF BOTTOM NAVBAR ====== -->
-<div class="bottomNav"><a name="navbar.bottom">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.bottom.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">Class</a></li>
-<li class="navBarCell1Rev">Use</li>
-<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
-<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li>Prev</li>
-<li>Next</li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_bottom">
-<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_bottom");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<a name="skip.navbar.bottom">
-<!--   -->
-</a></div>
-<!-- ======== END OF BOTTOM NAVBAR ======= -->
-<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
-</body>
-</html>
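
The ChecksumCreater page removed above indexed one of the private reflection shims in
FanOutOneBlockAsyncDFSOutputHelper that bridged differences between Hadoop releases
(note the separate createChecksumCreater27 and createChecksumCreater28 factories in the
method table). As a hedged, illustrative sketch only (the interface, class and method
names below, ChecksumCreator, ChecksumShim, createChecksum and createChecksumOpt, are
assumptions for the example, not the actual HBase or HDFS code), a shim of this kind
typically resolves the version-specific method once and wraps it in a small functional
interface:

    import java.lang.reflect.Method;
    import org.apache.hadoop.util.DataChecksum;

    // Illustrative sketch: resolve a version-specific factory method once via
    // reflection and expose it behind a stable interface. Names are hypothetical.
    @FunctionalInterface
    interface ChecksumCreator {
      DataChecksum create(Object dfsClientConf);
    }

    final class ChecksumShim {

      static ChecksumCreator resolve(Class<?> confClass) throws NoSuchMethodException {
        try {
          // Hypothetical newer signature.
          Method create = confClass.getMethod("createChecksum");
          return conf -> invoke(create, conf);
        } catch (NoSuchMethodException e) {
          // Hypothetical older signature used as a fallback.
          Method create = confClass.getMethod("createChecksumOpt");
          return conf -> invoke(create, conf);
        }
      }

      private static DataChecksum invoke(Method m, Object target, Object... args) {
        try {
          return (DataChecksum) m.invoke(target, args);
        } catch (ReflectiveOperationException e) {
          throw new IllegalStateException("checksum factory invocation failed", e);
        }
      }
    }

The point of the pattern is that the reflective lookup happens once (at class
initialization in the real helper), so each later call costs only a plain Method.invoke.
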
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html
deleted file mode 100644
index d32138a..0000000
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html
+++ /dev/null
@@ -1,178 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!-- NewPage -->
-<html lang="en">
-<head>
-<!-- Generated by javadoc -->
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.PBHelper (Apache HBase 3.0.0-SNAPSHOT API)</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
-<script type="text/javascript" src="../../../../../../../script.js"></script>
-</head>
-<body>
-<script type="text/javascript"><!--
-    try {
-        if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.PBHelper (Apache HBase 3.0.0-SNAPSHOT API)";
-        }
-    }
-    catch(err) {
-    }
-//-->
-</script>
-<noscript>
-<div>JavaScript is disabled on your browser.</div>
-</noscript>
-<!-- ========= START OF TOP NAVBAR ======= -->
-<div class="topNav"><a name="navbar.top">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.top.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">Class</a></li>
-<li class="navBarCell1Rev">Use</li>
-<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
-<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li>Prev</li>
-<li>Next</li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_top">
-<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_top");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<a name="skip.navbar.top">
-<!--   -->
-</a></div>
-<!-- ========= END OF TOP NAVBAR ========= -->
-<div class="header">
-<h2 title="Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.PBHelper" class="title">Uses of Interface<br>org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.PBHelper</h2>
-</div>
-<div class="classUseContainer">
-<ul class="blockList">
-<li class="blockList">
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
-<caption><span>Packages that use <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Package</th>
-<th class="colLast" scope="col">Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><a href="#org.apache.hadoop.hbase.io.asyncfs">org.apache.hadoop.hbase.io.asyncfs</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-<li class="blockList">
-<ul class="blockList">
-<li class="blockList"><a name="org.apache.hadoop.hbase.io.asyncfs">
-<!--   -->
-</a>
-<h3>Uses of <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a> in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></h3>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
-<caption><span>Fields in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a> declared as <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Field and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#PB_HELPER">PB_HELPER</a></span></code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
-<caption><span>Methods in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a> that return <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createPBHelper--">createPBHelper</a></span>()</code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-<!-- ======= START OF BOTTOM NAVBAR ====== -->
-<div class="bottomNav"><a name="navbar.bottom">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.bottom.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">Class</a></li>
-<li class="navBarCell1Rev">Use</li>
-<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
-<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li>Prev</li>
-<li>Next</li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_bottom">
-<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_bottom");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<a name="skip.navbar.bottom">
-<!--   -->
-</a></div>
-<!-- ======== END OF BOTTOM NAVBAR ======= -->
-<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
-</body>
-</html>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html
deleted file mode 100644
index 9963dd1..0000000
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html
+++ /dev/null
@@ -1,186 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!-- NewPage -->
-<html lang="en">
-<head>
-<!-- Generated by javadoc -->
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter (Apache HBase 3.0.0-SNAPSHOT API)</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
-<script type="text/javascript" src="../../../../../../../script.js"></script>
-</head>
-<body>
-<script type="text/javascript"><!--
-    try {
-        if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter (Apache HBase 3.0.0-SNAPSHOT API)";
-        }
-    }
-    catch(err) {
-    }
-//-->
-</script>
-<noscript>
-<div>JavaScript is disabled on your browser.</div>
-</noscript>
-<!-- ========= START OF TOP NAVBAR ======= -->
-<div class="topNav"><a name="navbar.top">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.top.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">Class</a></li>
-<li class="navBarCell1Rev">Use</li>
-<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
-<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li>Prev</li>
-<li>Next</li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_top">
-<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_top");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<a name="skip.navbar.top">
-<!--   -->
-</a></div>
-<!-- ========= END OF TOP NAVBAR ========= -->
-<div class="header">
-<h2 title="Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter" class="title">Uses of Interface<br>org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</h2>
-</div>
-<div class="classUseContainer">
-<ul class="blockList">
-<li class="blockList">
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
-<caption><span>Packages that use <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Package</th>
-<th class="colLast" scope="col">Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><a href="#org.apache.hadoop.hbase.io.asyncfs">org.apache.hadoop.hbase.io.asyncfs</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-<li class="blockList">
-<ul class="blockList">
-<li class="blockList"><a name="org.apache.hadoop.hbase.io.asyncfs">
-<!--   -->
-</a>
-<h3>Uses of <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a> in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></h3>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
-<caption><span>Fields in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a> declared as <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Field and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#PIPELINE_ACK_STATUS_GETTER">PIPELINE_ACK_STATUS_GETTER</a></span></code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
-<caption><span>Methods in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a> that return <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createPipelineAckStatusGetter--">createPipelineAckStatusGetter</a></span>()</code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createPipelineAckStatusGetter26--">createPipelineAckStatusGetter26</a></span>()</code>&nbsp;</td>
-</tr>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createPipelineAckStatusGetter27--">createPipelineAckStatusGetter27</a></span>()</code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-<!-- ======= START OF BOTTOM NAVBAR ====== -->
-<div class="bottomNav"><a name="navbar.bottom">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.bottom.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">Class</a></li>
-<li class="navBarCell1Rev">Use</li>
-<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
-<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li>Prev</li>
-<li>Next</li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_bottom">
-<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_bottom");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<a name="skip.navbar.bottom">
-<!--   -->
-</a></div>
-<!-- ======== END OF BOTTOM NAVBAR ======= -->
-<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
-</body>
-</html>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html
deleted file mode 100644
index 765f1c0..0000000
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html
+++ /dev/null
@@ -1,178 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!-- NewPage -->
-<html lang="en">
-<head>
-<!-- Generated by javadoc -->
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter (Apache HBase 3.0.0-SNAPSHOT API)</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
-<script type="text/javascript" src="../../../../../../../script.js"></script>
-</head>
-<body>
-<script type="text/javascript"><!--
-    try {
-        if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter (Apache HBase 3.0.0-SNAPSHOT API)";
-        }
-    }
-    catch(err) {
-    }
-//-->
-</script>
-<noscript>
-<div>JavaScript is disabled on your browser.</div>
-</noscript>
-<!-- ========= START OF TOP NAVBAR ======= -->
-<div class="topNav"><a name="navbar.top">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.top.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">Class</a></li>
-<li class="navBarCell1Rev">Use</li>
-<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
-<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li>Prev</li>
-<li>Next</li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_top">
-<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_top");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<a name="skip.navbar.top">
-<!--   -->
-</a></div>
-<!-- ========= END OF TOP NAVBAR ========= -->
-<div class="header">
-<h2 title="Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter" class="title">Uses of Interface<br>org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</h2>
-</div>
-<div class="classUseContainer">
-<ul class="blockList">
-<li class="blockList">
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
-<caption><span>Packages that use <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Package</th>
-<th class="colLast" scope="col">Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><a href="#org.apache.hadoop.hbase.io.asyncfs">org.apache.hadoop.hbase.io.asyncfs</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-<li class="blockList">
-<ul class="blockList">
-<li class="blockList"><a name="org.apache.hadoop.hbase.io.asyncfs">
-<!--   -->
-</a>
-<h3>Uses of <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</a> in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></h3>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
-<caption><span>Fields in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a> declared as <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Field and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#STORAGE_TYPE_SETTER">STORAGE_TYPE_SETTER</a></span></code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
-<caption><span>Methods in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a> that return <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createStorageTypeSetter--">createStorageTypeSetter</a></span>()</code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-<!-- ======= START OF BOTTOM NAVBAR ====== -->
-<div class="bottomNav"><a name="navbar.bottom">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.bottom.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">Class</a></li>
-<li class="navBarCell1Rev">Use</li>
-<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
-<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li>Prev</li>
-<li>Next</li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_bottom">
-<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_bottom");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<a name="skip.navbar.bottom">
-<!--   -->
-</a></div>
-<!-- ======== END OF BOTTOM NAVBAR ======= -->
-<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
-</body>
-</html>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html
deleted file mode 100644
index c585f71..0000000
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html
+++ /dev/null
@@ -1,178 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!-- NewPage -->
-<html lang="en">
-<head>
-<!-- Generated by javadoc -->
-<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-<title>Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper (Apache HBase 3.0.0-SNAPSHOT API)</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
-<script type="text/javascript" src="../../../../../../../script.js"></script>
-</head>
-<body>
-<script type="text/javascript"><!--
-    try {
-        if (location.href.indexOf('is-external=true') == -1) {
-            parent.document.title="Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper (Apache HBase 3.0.0-SNAPSHOT API)";
-        }
-    }
-    catch(err) {
-    }
-//-->
-</script>
-<noscript>
-<div>JavaScript is disabled on your browser.</div>
-</noscript>
-<!-- ========= START OF TOP NAVBAR ======= -->
-<div class="topNav"><a name="navbar.top">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.top.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">Class</a></li>
-<li class="navBarCell1Rev">Use</li>
-<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
-<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li>Prev</li>
-<li>Next</li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_top">
-<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_top");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<a name="skip.navbar.top">
-<!--   -->
-</a></div>
-<!-- ========= END OF TOP NAVBAR ========= -->
-<div class="header">
-<h2 title="Uses of Interface org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper" class="title">Uses of Interface<br>org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</h2>
-</div>
-<div class="classUseContainer">
-<ul class="blockList">
-<li class="blockList">
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
-<caption><span>Packages that use <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Package</th>
-<th class="colLast" scope="col">Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><a href="#org.apache.hadoop.hbase.io.asyncfs">org.apache.hadoop.hbase.io.asyncfs</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-<li class="blockList">
-<ul class="blockList">
-<li class="blockList"><a name="org.apache.hadoop.hbase.io.asyncfs">
-<!--   -->
-</a>
-<h3>Uses of <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a> in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a></h3>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation">
-<caption><span>Fields in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a> declared as <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Field and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputSaslHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#PB_HELPER">PB_HELPER</a></span></code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
-<caption><span>Methods in <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/package-summary.html">org.apache.hadoop.hbase.io.asyncfs</a> that return <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a></span><span class="tabEnd">&nbsp;</span></caption>
-<tr>
-<th class="colFirst" scope="col">Modifier and Type</th>
-<th class="colLast" scope="col">Method and Description</th>
-</tr>
-<tbody>
-<tr class="altColor">
-<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputSaslHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createPBHelper--">createPBHelper</a></span>()</code>&nbsp;</td>
-</tr>
-</tbody>
-</table>
-</li>
-</ul>
-</li>
-</ul>
-</div>
-<!-- ======= START OF BOTTOM NAVBAR ====== -->
-<div class="bottomNav"><a name="navbar.bottom">
-<!--   -->
-</a>
-<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
-<a name="navbar.bottom.firstrow">
-<!--   -->
-</a>
-<ul class="navList" title="Navigation">
-<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
-<li><a href="../package-summary.html">Package</a></li>
-<li><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">Class</a></li>
-<li class="navBarCell1Rev">Use</li>
-<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
-<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
-<li><a href="../../../../../../../index-all.html">Index</a></li>
-<li><a href="../../../../../../../help-doc.html">Help</a></li>
-</ul>
-</div>
-<div class="subNav">
-<ul class="navList">
-<li>Prev</li>
-<li>Next</li>
-</ul>
-<ul class="navList">
-<li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" target="_top">Frames</a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" target="_top">No&nbsp;Frames</a></li>
-</ul>
-<ul class="navList" id="allclasses_navbar_bottom">
-<li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li>
-</ul>
-<div>
-<script type="text/javascript"><!--
-  allClassesLink = document.getElementById("allclasses_navbar_bottom");
-  if(window==top) {
-    allClassesLink.style.display = "block";
-  }
-  else {
-    allClassesLink.style.display = "none";
-  }
-  //-->
-</script>
-</div>
-<a name="skip.navbar.bottom">
-<!--   -->
-</a></div>
-<!-- ======== END OF BOTTOM NAVBAR ======= -->
-<p class="legalCopy"><small>Copyright &#169; 2007&#x2013;2019 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
-</body>
-</html>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html
index 16a1f3d..448047d 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html
@@ -121,11 +121,11 @@
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputSaslHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createTransparentCryptoHelper27--">createTransparentCryptoHelper27</a></span>()</code>&nbsp;</td>
+<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputSaslHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createTransparentCryptoHelperWithHDFS12396--">createTransparentCryptoHelperWithHDFS12396</a></span>()</code>&nbsp;</td>
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</a></code></td>
-<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputSaslHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createTransparentCryptoHelper28--">createTransparentCryptoHelper28</a></span>()</code>&nbsp;</td>
+<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputSaslHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.html#createTransparentCryptoHelperWithoutHDFS12396--">createTransparentCryptoHelperWithoutHDFS12396</a></span>()</code>&nbsp;</td>
 </tr>
 </tbody>
 </table>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-frame.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-frame.html
index 31920f3..72f0f1d 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-frame.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-frame.html
@@ -14,15 +14,9 @@
 <h2 title="Interfaces">Interfaces</h2>
 <ul title="Interfaces">
 <li><a href="AsyncFSOutput.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">AsyncFSOutput</span></a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</span></a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</span></a></li>
 <li><a href="FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</span></a></li>
 <li><a href="FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</span></a></li>
 <li><a href="FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.LeaseManager</span></a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</span></a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</span></a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</span></a></li>
-<li><a href="FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</span></a></li>
 <li><a href="FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor</span></a></li>
 <li><a href="FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs" target="classFrame"><span class="interfaceName">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</span></a></li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-summary.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-summary.html
index 8ce91ae..73e6af8 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-summary.html
@@ -89,14 +89,6 @@
 </td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-<tr class="altColor">
-<td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-<tr class="rowColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
@@ -109,22 +101,6 @@
 <td class="colLast">&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-<tr class="altColor">
-<td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a></td>
-<td class="colLast">&nbsp;</td>
-</tr>
-<tr class="altColor">
 <td class="colFirst"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor</a></td>
 <td class="colLast">&nbsp;</td>
 </tr>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-tree.html
index b9ac4c3..893aea2 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-tree.html
@@ -147,15 +147,9 @@
 </li>
 </ul>
 </li>
-<li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.LeaseManager</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</span></a></li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-use.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-use.html
index 64cc8bf..825ea9e 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/package-use.html
@@ -120,12 +120,6 @@
 <td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutput.State.html#org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput.State</a>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html#org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</a>&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html#org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</a>&nbsp;</td>
-</tr>
-<tr class="altColor">
 <td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html#org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</a>&nbsp;</td>
 </tr>
 <tr class="rowColor">
@@ -135,18 +129,6 @@
 <td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html#org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.LeaseManager</a>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html#org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</a>&nbsp;</td>
-</tr>
-<tr class="altColor">
-<td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html#org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</a>&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html#org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</a>&nbsp;</td>
-</tr>
-<tr class="altColor">
-<td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html#org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</a>&nbsp;</td>
-</tr>
-<tr class="rowColor">
 <td class="colOne"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html#org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor</a>&nbsp;</td>
 </tr>
 <tr class="altColor">
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index 447e642..94a3adb 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -283,12 +283,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheFactory.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockCacheFactory.ExternalBlockCaches</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Cacheable.MemoryType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheFactory.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockCacheFactory.ExternalBlockCaches</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index 148fecd..53b29e4 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -353,9 +353,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallAction</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.SourceStorage.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">MetricsHBaseServerSourceFactoryImpl.SourceStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/CallEvent.Type.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">CallEvent.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallAction</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index 5af518b..03452ce 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -297,10 +297,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/CellCounter.CellCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">CellCounter.CellCounterMapper.Counters</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/CellCounter.CellCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">CellCounter.CellCounterMapper.Counters</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html
index f63d55a..e0b322d 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/assignment/package-tree.html
@@ -148,8 +148,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.assignment.<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.TransitionType.html" title="enum in org.apache.hadoop.hbase.master.assignment"><span class="typeNameLink">TransitRegionStateProcedure.TransitionType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.assignment.<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/ServerState.html" title="enum in org.apache.hadoop.hbase.master.assignment"><span class="typeNameLink">ServerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.assignment.<a href="../../../../../../org/apache/hadoop/hbase/master/assignment/TransitRegionStateProcedure.TransitionType.html" title="enum in org.apache.hadoop.hbase.master.assignment"><span class="typeNameLink">TransitRegionStateProcedure.TransitionType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index 9673a5d..662089e 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -355,11 +355,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">RegionState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index 7b99c99..c5ce7d6 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -216,10 +216,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">TableProcedureInterface.TableOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MetaProcedureInterface.MetaOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">MetaProcedureInterface.MetaOperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">TableProcedureInterface.TableOperationType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index 95aad40..34f5a46 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -433,19 +433,19 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index c5b1098..3613612 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -217,9 +217,9 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">StateMachineProcedure.Flow</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/Procedure.LockState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">Procedure.LockState</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index 3151486..aee6174 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -240,12 +240,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index ed357c7..f5a3bf9 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -715,20 +715,20 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index dc05e9f..45a338f 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -130,9 +130,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.MatchCode.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">ScanQueryMatcher.MatchCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index 5ef6472..19c9c93 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
@@ -247,10 +247,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/CompressionContext.DictionaryIndex.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">CompressionContext.DictionaryIndex</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrResult.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">ProtobufLogReader.WALHdrResult</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.Type.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">RingBufferTruck.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.RollRequestReason.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">WALActionsListener.RollRequestReason</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/CompressionContext.DictionaryIndex.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">CompressionContext.DictionaryIndex</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.Type.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">RingBufferTruck.Type</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
index 7d70035..3aa879d 100644
--- a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
@@ -110,8 +110,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.FilterType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.FilterType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
index 6db1464..04498a7 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
@@ -143,9 +143,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessController.OpType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Action.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Action</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Scope.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Scope</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
index 3f42ad7..3e32f6b 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
@@ -192,9 +192,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">AuthMethod</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslUtil.QualityOfProtection.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslUtil.QualityOfProtection</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslStatus</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
index 699a75f..47e3265 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
@@ -207,8 +207,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ImplType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ImplType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftMetrics.ThriftServerType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftMetrics.ThriftServerType</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index d4877df..0f362e3 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -542,14 +542,14 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLock.ReferenceType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HBaseFsck.ErrorReporter.ERROR_CODE</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HBaseFsck.ErrorReporter.ERROR_CODE</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLock.ReferenceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
index 951fb0c..cb3db448 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
@@ -191,8 +191,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.Providers.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALFactory.Providers</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/RegionGroupingProvider.Strategies.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">RegionGroupingProvider.Strategies</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.Providers.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALFactory.Providers</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/overview-tree.html b/devapidocs/overview-tree.html
index 75485a2..eb18c20 100644
--- a/devapidocs/overview-tree.html
+++ b/devapidocs/overview-tree.html
@@ -4879,15 +4879,9 @@
 <li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="org/apache/hadoop/hbase/coprocessor/EndpointObserver.html" title="interface in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">EndpointObserver</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/EnvironmentEdge.html" title="interface in org.apache.hadoop.hbase.util"><span class="typeNameLink">EnvironmentEdge</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.visibility.expression.<a href="org/apache/hadoop/hbase/security/visibility/expression/ExpressionNode.html" title="interface in org.apache.hadoop.hbase.security.visibility.expression"><span class="typeNameLink">ExpressionNode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.BlockAdder</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.LeaseManager</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.PBHelper</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.PipelineAckStatusGetter</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputHelper.StorageTypeSetter</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputSaslHelper.PBHelper</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputSaslHelper.SaslAdaptor</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper.html" title="interface in org.apache.hadoop.hbase.io.asyncfs"><span class="typeNameLink">FanOutOneBlockAsyncDFSOutputSaslHelper.TransparentCryptoHelper</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/FavoredNodesForRegion.html" title="interface in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FavoredNodesForRegion</span></a>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index 69b264b..b33b93d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -18,9 +18,9 @@
 <span class="sourceLineNo">010</span>  public static final String version = "3.0.0-SNAPSHOT";<a name="line.10"></a>
 <span class="sourceLineNo">011</span>  public static final String revision = "Unknown";<a name="line.11"></a>
 <span class="sourceLineNo">012</span>  public static final String user = "jenkins";<a name="line.12"></a>
-<span class="sourceLineNo">013</span>  public static final String date = "Fri May 17 14:35:11 UTC 2019";<a name="line.13"></a>
+<span class="sourceLineNo">013</span>  public static final String date = "Sat May 18 14:34:54 UTC 2019";<a name="line.13"></a>
 <span class="sourceLineNo">014</span>  public static final String url = "file:///home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";<a name="line.14"></a>
-<span class="sourceLineNo">015</span>  public static final String srcChecksum = "652d292d831152f5ab758be46fc6153c";<a name="line.15"></a>
+<span class="sourceLineNo">015</span>  public static final String srcChecksum = "a44f407917e1e878dfbdf9c53f3a3781";<a name="line.15"></a>
 <span class="sourceLineNo">016</span>}<a name="line.16"></a>
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
deleted file mode 100644
index eb6a26e..0000000
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.BlockAdder.html
+++ /dev/null
@@ -1,969 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<html lang="en">
-<head>
-<title>Source code</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
-</head>
-<body>
-<div class="sourceContainer">
-<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a>
-<span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a>
-<span class="sourceLineNo">003</span> * or more contributor license agreements.  See the NOTICE file<a name="line.3"></a>
-<span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a>
-<span class="sourceLineNo">005</span> * regarding copyright ownership.  The ASF licenses this file<a name="line.5"></a>
-<span class="sourceLineNo">006</span> * to you under the Apache License, Version 2.0 (the<a name="line.6"></a>
-<span class="sourceLineNo">007</span> * "License"); you may not use this file except in compliance<a name="line.7"></a>
-<span class="sourceLineNo">008</span> * with the License.  You may obtain a copy of the License at<a name="line.8"></a>
-<span class="sourceLineNo">009</span> *<a name="line.9"></a>
-<span class="sourceLineNo">010</span> *     http://www.apache.org/licenses/LICENSE-2.0<a name="line.10"></a>
-<span class="sourceLineNo">011</span> *<a name="line.11"></a>
-<span class="sourceLineNo">012</span> * Unless required by applicable law or agreed to in writing, software<a name="line.12"></a>
-<span class="sourceLineNo">013</span> * distributed under the License is distributed on an "AS IS" BASIS,<a name="line.13"></a>
-<span class="sourceLineNo">014</span> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<a name="line.14"></a>
-<span class="sourceLineNo">015</span> * See the License for the specific language governing permissions and<a name="line.15"></a>
-<span class="sourceLineNo">016</span> * limitations under the License.<a name="line.16"></a>
-<span class="sourceLineNo">017</span> */<a name="line.17"></a>
-<span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.io.asyncfs;<a name="line.18"></a>
-<span class="sourceLineNo">019</span><a name="line.19"></a>
-<span class="sourceLineNo">020</span>import static org.apache.hadoop.fs.CreateFlag.CREATE;<a name="line.20"></a>
-<span class="sourceLineNo">021</span>import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;<a name="line.21"></a>
-<span class="sourceLineNo">022</span>import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createEncryptor;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.trySaslNegotiate;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import static org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage.PIPELINE_SETUP_CREATE;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import static org.apache.hbase.thirdparty.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import static org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE;<a name="line.29"></a>
-<span class="sourceLineNo">030</span><a name="line.30"></a>
-<span class="sourceLineNo">031</span>import com.google.protobuf.CodedOutputStream;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import java.io.IOException;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import java.io.InterruptedIOException;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import java.lang.reflect.InvocationTargetException;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import java.lang.reflect.Method;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import java.util.ArrayList;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import java.util.EnumSet;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import java.util.List;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import java.util.concurrent.TimeUnit;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.conf.Configuration;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.crypto.CryptoProtocolVersion;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.crypto.Encryptor;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.fs.CreateFlag;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.fs.FileSystem;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.fs.FileSystemLinkResolver;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.fs.Path;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.fs.UnresolvedLinkException;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hdfs.DFSOutputStream;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hdfs.protocol.ClientProtocol;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hdfs.protocol.DatanodeInfo;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hdfs.protocol.ExtendedBlock;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hdfs.protocol.LocatedBlock;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hdfs.protocol.datatransfer.Op;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.io.EnumSetWritable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.net.NetUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.security.token.Token;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.util.DataChecksum;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.slf4j.Logger;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.slf4j.LoggerFactory;<a name="line.87"></a>
-<span class="sourceLineNo">088</span><a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.io.netty.bootstrap.Bootstrap;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufAllocator;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufOutputStream;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.io.netty.buffer.PooledByteBufAllocator;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.io.netty.channel.Channel;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelFuture;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelFutureListener;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandler;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelInitializer;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelPipeline;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hbase.thirdparty.io.netty.channel.EventLoop;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufDecoder;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateEvent;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateHandler;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.Future;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.FutureListener;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise;<a name="line.112"></a>
-<span class="sourceLineNo">113</span><a name="line.113"></a>
-<span class="sourceLineNo">114</span>/**<a name="line.114"></a>
-<span class="sourceLineNo">115</span> * Helper class for implementing {@link FanOutOneBlockAsyncDFSOutput}.<a name="line.115"></a>
-<span class="sourceLineNo">116</span> */<a name="line.116"></a>
-<span class="sourceLineNo">117</span>@InterfaceAudience.Private<a name="line.117"></a>
-<span class="sourceLineNo">118</span>public final class FanOutOneBlockAsyncDFSOutputHelper {<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  private static final Logger LOG =<a name="line.119"></a>
-<span class="sourceLineNo">120</span>      LoggerFactory.getLogger(FanOutOneBlockAsyncDFSOutputHelper.class);<a name="line.120"></a>
-<span class="sourceLineNo">121</span><a name="line.121"></a>
-<span class="sourceLineNo">122</span>  private FanOutOneBlockAsyncDFSOutputHelper() {<a name="line.122"></a>
-<span class="sourceLineNo">123</span>  }<a name="line.123"></a>
-<span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>  public static final String ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES = "hbase.fs.async.create.retries";<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  public static final int DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES = 10;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  // use pooled allocator for performance.<a name="line.128"></a>
-<span class="sourceLineNo">129</span>  private static final ByteBufAllocator ALLOC = PooledByteBufAllocator.DEFAULT;<a name="line.129"></a>
-<span class="sourceLineNo">130</span><a name="line.130"></a>
-<span class="sourceLineNo">131</span>  // copied from DFSPacket since it is package private.<a name="line.131"></a>
-<span class="sourceLineNo">132</span>  public static final long HEART_BEAT_SEQNO = -1L;<a name="line.132"></a>
-<span class="sourceLineNo">133</span><a name="line.133"></a>
-<span class="sourceLineNo">134</span>  // Timeouts for communicating with DataNode for streaming writes/reads<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  public static final int READ_TIMEOUT = 60 * 1000;<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>  private static final DatanodeInfo[] EMPTY_DN_ARRAY = new DatanodeInfo[0];<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>  // helper class for getting Status from PipelineAckProto. In hadoop 2.6 or before, there is a<a name="line.139"></a>
-<span class="sourceLineNo">140</span>  // getStatus method, and for hadoop 2.7 or after, the status is retrieved from flag. The flag may<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  // get from proto directly, or combined by the reply field of the proto and a ECN object. See<a name="line.141"></a>
-<span class="sourceLineNo">142</span>  // createPipelineAckStatusGetter for more details.<a name="line.142"></a>
-<span class="sourceLineNo">143</span>  private interface PipelineAckStatusGetter {<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    Status get(PipelineAckProto ack);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  private static final PipelineAckStatusGetter PIPELINE_ACK_STATUS_GETTER;<a name="line.147"></a>
-<span class="sourceLineNo">148</span><a name="line.148"></a>
-<span class="sourceLineNo">149</span>  // StorageType enum is placed under o.a.h.hdfs in hadoop 2.6 and o.a.h.fs in hadoop 2.7. So here<a name="line.149"></a>
-<span class="sourceLineNo">150</span>  // we need to use reflection to set it.See createStorageTypeSetter for more details.<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  private interface StorageTypeSetter {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    OpWriteBlockProto.Builder set(OpWriteBlockProto.Builder builder, Enum&lt;?&gt; storageType);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>  private static final StorageTypeSetter STORAGE_TYPE_SETTER;<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>  // helper class for calling add block method on namenode. There is a addBlockFlags parameter for<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  // hadoop 2.8 or later. See createBlockAdder for more details.<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  private interface BlockAdder {<a name="line.159"></a>
-<span class="sourceLineNo">160</span><a name="line.160"></a>
-<span class="sourceLineNo">161</span>    LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,<a name="line.161"></a>
-<span class="sourceLineNo">162</span>        ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId, String[] favoredNodes)<a name="line.162"></a>
-<span class="sourceLineNo">163</span>        throws IOException;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  }<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>  private static final BlockAdder BLOCK_ADDER;<a name="line.166"></a>
-<span class="sourceLineNo">167</span><a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private interface LeaseManager {<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>    void begin(DFSClient client, long inodeId);<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>    void end(DFSClient client, long inodeId);<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>  private static final LeaseManager LEASE_MANAGER;<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>  // This is used to terminate a recoverFileLease call when FileSystem is already closed.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  // isClientRunning is not public so we need to use reflection.<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  private interface DFSClientAdaptor {<a name="line.179"></a>
-<span class="sourceLineNo">180</span><a name="line.180"></a>
-<span class="sourceLineNo">181</span>    boolean isClientRunning(DFSClient client);<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  }<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>  private static final DFSClientAdaptor DFS_CLIENT_ADAPTOR;<a name="line.184"></a>
-<span class="sourceLineNo">185</span><a name="line.185"></a>
-<span class="sourceLineNo">186</span>  // helper class for convert protos.<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  private interface PBHelper {<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>    ExtendedBlockProto convert(ExtendedBlock b);<a name="line.189"></a>
-<span class="sourceLineNo">190</span><a name="line.190"></a>
-<span class="sourceLineNo">191</span>    TokenProto convert(Token&lt;?&gt; tok);<a name="line.191"></a>
-<span class="sourceLineNo">192</span>  }<a name="line.192"></a>
-<span class="sourceLineNo">193</span><a name="line.193"></a>
-<span class="sourceLineNo">194</span>  private static final PBHelper PB_HELPER;<a name="line.194"></a>
-<span class="sourceLineNo">195</span><a name="line.195"></a>
-<span class="sourceLineNo">196</span>  // helper class for creating data checksum.<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private interface ChecksumCreater {<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    DataChecksum createChecksum(DFSClient client);<a name="line.198"></a>
-<span class="sourceLineNo">199</span>  }<a name="line.199"></a>
-<span class="sourceLineNo">200</span><a name="line.200"></a>
-<span class="sourceLineNo">201</span>  private static final ChecksumCreater CHECKSUM_CREATER;<a name="line.201"></a>
-<span class="sourceLineNo">202</span><a name="line.202"></a>
-<span class="sourceLineNo">203</span>  // helper class for creating files.<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  private interface FileCreator {<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    default HdfsFileStatus create(ClientProtocol instance, String src, FsPermission masked,<a name="line.205"></a>
-<span class="sourceLineNo">206</span>        String clientName, EnumSetWritable&lt;CreateFlag&gt; flag, boolean createParent,<a name="line.206"></a>
-<span class="sourceLineNo">207</span>        short replication, long blockSize, CryptoProtocolVersion[] supportedVersions)<a name="line.207"></a>
-<span class="sourceLineNo">208</span>        throws Exception {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      try {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        return (HdfsFileStatus) createObject(instance, src, masked, clientName, flag, createParent,<a name="line.210"></a>
-<span class="sourceLineNo">211</span>          replication, blockSize, supportedVersions);<a name="line.211"></a>
-<span class="sourceLineNo">212</span>      } catch (InvocationTargetException e) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        if (e.getCause() instanceof Exception) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>          throw (Exception) e.getCause();<a name="line.214"></a>
-<span class="sourceLineNo">215</span>        } else {<a name="line.215"></a>
-<span class="sourceLineNo">216</span>          throw new RuntimeException(e.getCause());<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        }<a name="line.217"></a>
-<span class="sourceLineNo">218</span>      }<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    }<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>    Object createObject(ClientProtocol instance, String src, FsPermission masked, String clientName,<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        EnumSetWritable&lt;CreateFlag&gt; flag, boolean createParent, short replication, long blockSize,<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        CryptoProtocolVersion[] supportedVersions) throws Exception;<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  }<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>  private static final FileCreator FILE_CREATOR;<a name="line.226"></a>
-<span class="sourceLineNo">227</span><a name="line.227"></a>
-<span class="sourceLineNo">228</span>  private static DFSClientAdaptor createDFSClientAdaptor() throws NoSuchMethodException {<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    Method isClientRunningMethod = DFSClient.class.getDeclaredMethod("isClientRunning");<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    isClientRunningMethod.setAccessible(true);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    return new DFSClientAdaptor() {<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>      @Override<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      public boolean isClientRunning(DFSClient client) {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        try {<a name="line.235"></a>
-<span class="sourceLineNo">236</span>          return (Boolean) isClientRunningMethod.invoke(client);<a name="line.236"></a>
-<span class="sourceLineNo">237</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.237"></a>
-<span class="sourceLineNo">238</span>          throw new RuntimeException(e);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>        }<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      }<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    };<a name="line.241"></a>
-<span class="sourceLineNo">242</span>  }<a name="line.242"></a>
-<span class="sourceLineNo">243</span><a name="line.243"></a>
-<span class="sourceLineNo">244</span>  private static LeaseManager createLeaseManager() throws NoSuchMethodException {<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    Method beginFileLeaseMethod =<a name="line.245"></a>
-<span class="sourceLineNo">246</span>        DFSClient.class.getDeclaredMethod("beginFileLease", long.class, DFSOutputStream.class);<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    beginFileLeaseMethod.setAccessible(true);<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    Method endFileLeaseMethod = DFSClient.class.getDeclaredMethod("endFileLease", long.class);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    endFileLeaseMethod.setAccessible(true);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    return new LeaseManager() {<a name="line.250"></a>
-<span class="sourceLineNo">251</span><a name="line.251"></a>
-<span class="sourceLineNo">252</span>      @Override<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      public void begin(DFSClient client, long inodeId) {<a name="line.253"></a>
-<span class="sourceLineNo">254</span>        try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          beginFileLeaseMethod.invoke(client, inodeId, null);<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          throw new RuntimeException(e);<a name="line.257"></a>
-<span class="sourceLineNo">258</span>        }<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      }<a name="line.259"></a>
-<span class="sourceLineNo">260</span><a name="line.260"></a>
-<span class="sourceLineNo">261</span>      @Override<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      public void end(DFSClient client, long inodeId) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>        try {<a name="line.263"></a>
-<span class="sourceLineNo">264</span>          endFileLeaseMethod.invoke(client, inodeId);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>          throw new RuntimeException(e);<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        }<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      }<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    };<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private static PipelineAckStatusGetter createPipelineAckStatusGetter27()<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      throws NoSuchMethodException {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    Method getFlagListMethod = PipelineAckProto.class.getMethod("getFlagList");<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    @SuppressWarnings("rawtypes")<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    Class&lt;? extends Enum&gt; ecnClass;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    try {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      ecnClass = Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck$ECN")<a name="line.278"></a>
-<span class="sourceLineNo">279</span>          .asSubclass(Enum.class);<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    } catch (ClassNotFoundException e) {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      String msg = "Couldn't properly initialize the PipelineAck.ECN class. Please " +<a name="line.281"></a>
-<span class="sourceLineNo">282</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.282"></a>
-<span class="sourceLineNo">283</span>          "HBASE-16110 for more information.";<a name="line.283"></a>
-<span class="sourceLineNo">284</span>      LOG.error(msg, e);<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      throw new Error(msg, e);<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    @SuppressWarnings("unchecked")<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    Enum&lt;?&gt; disabledECN = Enum.valueOf(ecnClass, "DISABLED");<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    Method getReplyMethod = PipelineAckProto.class.getMethod("getReply", int.class);<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    Method combineHeaderMethod =<a name="line.290"></a>
-<span class="sourceLineNo">291</span>        PipelineAck.class.getMethod("combineHeader", ecnClass, Status.class);<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    Method getStatusFromHeaderMethod =<a name="line.292"></a>
-<span class="sourceLineNo">293</span>        PipelineAck.class.getMethod("getStatusFromHeader", int.class);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return new PipelineAckStatusGetter() {<a name="line.294"></a>
-<span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>      @Override<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      public Status get(PipelineAckProto ack) {<a name="line.297"></a>
-<span class="sourceLineNo">298</span>        try {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>          @SuppressWarnings("unchecked")<a name="line.299"></a>
-<span class="sourceLineNo">300</span>          List&lt;Integer&gt; flagList = (List&lt;Integer&gt;) getFlagListMethod.invoke(ack);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>          Integer headerFlag;<a name="line.301"></a>
-<span class="sourceLineNo">302</span>          if (flagList.isEmpty()) {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>            Status reply = (Status) getReplyMethod.invoke(ack, 0);<a name="line.303"></a>
-<span class="sourceLineNo">304</span>            headerFlag = (Integer) combineHeaderMethod.invoke(null, disabledECN, reply);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>          } else {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>            headerFlag = flagList.get(0);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>          }<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          return (Status) getStatusFromHeaderMethod.invoke(null, headerFlag);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          throw new RuntimeException(e);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>        }<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    };<a name="line.313"></a>
-<span class="sourceLineNo">314</span>  }<a name="line.314"></a>
-<span class="sourceLineNo">315</span><a name="line.315"></a>
-<span class="sourceLineNo">316</span>  private static PipelineAckStatusGetter createPipelineAckStatusGetter26()<a name="line.316"></a>
-<span class="sourceLineNo">317</span>      throws NoSuchMethodException {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    Method getStatusMethod = PipelineAckProto.class.getMethod("getStatus", int.class);<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    return new PipelineAckStatusGetter() {<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>      @Override<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      public Status get(PipelineAckProto ack) {<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        try {<a name="line.323"></a>
-<span class="sourceLineNo">324</span>          return (Status) getStatusMethod.invoke(ack, 0);<a name="line.324"></a>
-<span class="sourceLineNo">325</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>          throw new RuntimeException(e);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>        }<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      }<a name="line.328"></a>
-<span class="sourceLineNo">329</span>    };<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  }<a name="line.330"></a>
-<span class="sourceLineNo">331</span><a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private static PipelineAckStatusGetter createPipelineAckStatusGetter()<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      throws NoSuchMethodException {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    try {<a name="line.334"></a>
-<span class="sourceLineNo">335</span>      return createPipelineAckStatusGetter27();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    } catch (NoSuchMethodException e) {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>      LOG.debug("Can not get expected method " + e.getMessage() +<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          ", this usually because your Hadoop is pre 2.7.0, " +<a name="line.338"></a>
-<span class="sourceLineNo">339</span>          "try the methods in Hadoop 2.6.x instead.");<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    }<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    return createPipelineAckStatusGetter26();<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  }<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private static StorageTypeSetter createStorageTypeSetter() throws NoSuchMethodException {<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    Method setStorageTypeMethod =<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        OpWriteBlockProto.Builder.class.getMethod("setStorageType", StorageTypeProto.class);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    ImmutableMap.Builder&lt;String, StorageTypeProto&gt; builder = ImmutableMap.builder();<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    for (StorageTypeProto storageTypeProto : StorageTypeProto.values()) {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      builder.put(storageTypeProto.name(), storageTypeProto);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    }<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    ImmutableMap&lt;String, StorageTypeProto&gt; name2ProtoEnum = builder.build();<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    return new StorageTypeSetter() {<a name="line.352"></a>
-<span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>      @Override<a name="line.354"></a>
-<span class="sourceLineNo">355</span>      public OpWriteBlockProto.Builder set(OpWriteBlockProto.Builder builder, Enum&lt;?&gt; storageType) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>        Object protoEnum = name2ProtoEnum.get(storageType.name());<a name="line.356"></a>
-<span class="sourceLineNo">357</span>        try {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>          setStorageTypeMethod.invoke(builder, protoEnum);<a name="line.358"></a>
-<span class="sourceLineNo">359</span>        } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {<a name="line.359"></a>
-<span class="sourceLineNo">360</span>          throw new RuntimeException(e);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>        }<a name="line.361"></a>
-<span class="sourceLineNo">362</span>        return builder;<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      }<a name="line.363"></a>
-<span class="sourceLineNo">364</span>    };<a name="line.364"></a>
-<span class="sourceLineNo">365</span>  }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>  private static BlockAdder createBlockAdder() throws NoSuchMethodException {<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    for (Method method : ClientProtocol.class.getMethods()) {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      if (method.getName().equals("addBlock")) {<a name="line.369"></a>
-<span class="sourceLineNo">370</span>        Method addBlockMethod = method;<a name="line.370"></a>
-<span class="sourceLineNo">371</span>        Class&lt;?&gt;[] paramTypes = addBlockMethod.getParameterTypes();<a name="line.371"></a>
-<span class="sourceLineNo">372</span>        if (paramTypes[paramTypes.length - 1] == String[].class) {<a name="line.372"></a>
-<span class="sourceLineNo">373</span>          return new BlockAdder() {<a name="line.373"></a>
-<span class="sourceLineNo">374</span><a name="line.374"></a>
-<span class="sourceLineNo">375</span>            @Override<a name="line.375"></a>
-<span class="sourceLineNo">376</span>            public LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>                ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId,<a name="line.377"></a>
-<span class="sourceLineNo">378</span>                String[] favoredNodes) throws IOException {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>              try {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>                return (LocatedBlock) addBlockMethod.invoke(namenode, src, clientName, previous,<a name="line.380"></a>
-<span class="sourceLineNo">381</span>                  excludeNodes, fileId, favoredNodes);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>              } catch (IllegalAccessException e) {<a name="line.382"></a>
-<span class="sourceLineNo">383</span>                throw new RuntimeException(e);<a name="line.383"></a>
-<span class="sourceLineNo">384</span>              } catch (InvocationTargetException e) {<a name="line.384"></a>
-<span class="sourceLineNo">385</span>                Throwables.propagateIfPossible(e.getTargetException(), IOException.class);<a name="line.385"></a>
-<span class="sourceLineNo">386</span>                throw new RuntimeException(e);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>              }<a name="line.387"></a>
-<span class="sourceLineNo">388</span>            }<a name="line.388"></a>
-<span class="sourceLineNo">389</span>          };<a name="line.389"></a>
-<span class="sourceLineNo">390</span>        } else {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>          return new BlockAdder() {<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>            @Override<a name="line.393"></a>
-<span class="sourceLineNo">394</span>            public LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,<a name="line.394"></a>
-<span class="sourceLineNo">395</span>                ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId,<a name="line.395"></a>
-<span class="sourceLineNo">396</span>                String[] favoredNodes) throws IOException {<a name="line.396"></a>
-<span class="sourceLineNo">397</span>              try {<a name="line.397"></a>
-<span class="sourceLineNo">398</span>                return (LocatedBlock) addBlockMethod.invoke(namenode, src, clientName, previous,<a name="line.398"></a>
-<span class="sourceLineNo">399</span>                  excludeNodes, fileId, favoredNodes, null);<a name="line.399"></a>
-<span class="sourceLineNo">400</span>              } catch (IllegalAccessException e) {<a name="line.400"></a>
-<span class="sourceLineNo">401</span>                throw new RuntimeException(e);<a name="line.401"></a>
-<span class="sourceLineNo">402</span>              } catch (InvocationTargetException e) {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>                Throwables.propagateIfPossible(e.getTargetException(), IOException.class);<a name="line.403"></a>
-<span class="sourceLineNo">404</span>                throw new RuntimeException(e);<a name="line.404"></a>
-<span class="sourceLineNo">405</span>              }<a name="line.405"></a>
-<span class="sourceLineNo">406</span>            }<a name="line.406"></a>
-<span class="sourceLineNo">407</span>          };<a name="line.407"></a>
-<span class="sourceLineNo">408</span>        }<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      }<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    }<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    throw new NoSuchMethodException("Can not find addBlock method in ClientProtocol");<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  private static PBHelper createPBHelper() throws NoSuchMethodException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    Class&lt;?&gt; helperClass;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    String clazzName = "org.apache.hadoop.hdfs.protocolPB.PBHelperClient";<a name="line.416"></a>
-<span class="sourceLineNo">417</span>    try {<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      helperClass = Class.forName(clazzName);<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    } catch (ClassNotFoundException e) {<a name="line.419"></a>
-<span class="sourceLineNo">420</span>      helperClass = org.apache.hadoop.hdfs.protocolPB.PBHelper.class;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      LOG.debug("" + clazzName + " not found (Hadoop is pre-2.8.0?); using " +<a name="line.421"></a>
-<span class="sourceLineNo">422</span>          helperClass.toString() + " instead.");<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    }<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    Method convertEBMethod = helperClass.getMethod("convert", ExtendedBlock.class);<a name="line.424"></a>
-<span class="sourceLineNo">425</span>    Method convertTokenMethod = helperClass.getMethod("convert", Token.class);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    return new PBHelper() {<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>      @Override<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      public ExtendedBlockProto convert(ExtendedBlock b) {<a name="line.429"></a>
-<span class="sourceLineNo">430</span>        try {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>          return (ExtendedBlockProto) convertEBMethod.invoke(null, b);<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.432"></a>
-<span class="sourceLineNo">433</span>          throw new RuntimeException(e);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>        }<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      }<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>      @Override<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      public TokenProto convert(Token&lt;?&gt; tok) {<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        try {<a name="line.439"></a>
-<span class="sourceLineNo">440</span>          return (TokenProto) convertTokenMethod.invoke(null, tok);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>          throw new RuntimeException(e);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        }<a name="line.443"></a>
-<span class="sourceLineNo">444</span>      }<a name="line.444"></a>
-<span class="sourceLineNo">445</span>    };<a name="line.445"></a>
-<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>  private static ChecksumCreater createChecksumCreater28(Method getConfMethod, Class&lt;?&gt; confClass)<a name="line.448"></a>
-<span class="sourceLineNo">449</span>      throws NoSuchMethodException {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    for (Method method : confClass.getMethods()) {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      if (method.getName().equals("createChecksum")) {<a name="line.451"></a>
-<span class="sourceLineNo">452</span>        Method createChecksumMethod = method;<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        return new ChecksumCreater() {<a name="line.453"></a>
-<span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>          @Override<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          public DataChecksum createChecksum(DFSClient client) {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>            try {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>              return (DataChecksum) createChecksumMethod.invoke(getConfMethod.invoke(client),<a name="line.458"></a>
-<span class="sourceLineNo">459</span>                (Object) null);<a name="line.459"></a>
-<span class="sourceLineNo">460</span>            } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.460"></a>
-<span class="sourceLineNo">461</span>              throw new RuntimeException(e);<a name="line.461"></a>
-<span class="sourceLineNo">462</span>            }<a name="line.462"></a>
-<span class="sourceLineNo">463</span>          }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>        };<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      }<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    }<a name="line.466"></a>
-<span class="sourceLineNo">467</span>    throw new NoSuchMethodException("Can not find createChecksum method in DfsClientConf");<a name="line.467"></a>
-<span class="sourceLineNo">468</span>  }<a name="line.468"></a>
-<span class="sourceLineNo">469</span><a name="line.469"></a>
-<span class="sourceLineNo">470</span>  private static ChecksumCreater createChecksumCreater27(Method getConfMethod, Class&lt;?&gt; confClass)<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      throws NoSuchMethodException {<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    Method createChecksumMethod = confClass.getDeclaredMethod("createChecksum");<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    createChecksumMethod.setAccessible(true);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    return new ChecksumCreater() {<a name="line.474"></a>
-<span class="sourceLineNo">475</span><a name="line.475"></a>
-<span class="sourceLineNo">476</span>      @Override<a name="line.476"></a>
-<span class="sourceLineNo">477</span>      public DataChecksum createChecksum(DFSClient client) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>        try {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>          return (DataChecksum) createChecksumMethod.invoke(getConfMethod.invoke(client));<a name="line.479"></a>
-<span class="sourceLineNo">480</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          throw new RuntimeException(e);<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      }<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    };<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  }<a name="line.485"></a>
-<span class="sourceLineNo">486</span><a name="line.486"></a>
-<span class="sourceLineNo">487</span>  private static ChecksumCreater createChecksumCreater()<a name="line.487"></a>
-<span class="sourceLineNo">488</span>      throws NoSuchMethodException, ClassNotFoundException {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    Method getConfMethod = DFSClient.class.getMethod("getConf");<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    try {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      return createChecksumCreater28(getConfMethod,<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        Class.forName("org.apache.hadoop.hdfs.client.impl.DfsClientConf"));<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    } catch (ClassNotFoundException e) {<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      LOG.debug("No DfsClientConf class found, should be hadoop 2.7-", e);<a name="line.494"></a>
-<span class="sourceLineNo">495</span>    }<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    return createChecksumCreater27(getConfMethod,<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      Class.forName("org.apache.hadoop.hdfs.DFSClient$Conf"));<a name="line.497"></a>
-<span class="sourceLineNo">498</span>  }<a name="line.498"></a>
-<span class="sourceLineNo">499</span><a name="line.499"></a>
-<span class="sourceLineNo">500</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      CryptoProtocolVersion[].class, String.class);<a name="line.503"></a>
-<span class="sourceLineNo">504</span><a name="line.504"></a>
-<span class="sourceLineNo">505</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        supportedVersions) -&gt; {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.507"></a>
-<span class="sourceLineNo">508</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    };<a name="line.509"></a>
-<span class="sourceLineNo">510</span>  }<a name="line.510"></a>
-<span class="sourceLineNo">511</span><a name="line.511"></a>
-<span class="sourceLineNo">512</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      CryptoProtocolVersion[].class);<a name="line.515"></a>
-<span class="sourceLineNo">516</span><a name="line.516"></a>
-<span class="sourceLineNo">517</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.517"></a>
-<span class="sourceLineNo">518</span>        supportedVersions) -&gt; {<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.519"></a>
-<span class="sourceLineNo">520</span>        createParent, replication, blockSize, supportedVersions);<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    };<a name="line.521"></a>
-<span class="sourceLineNo">522</span>  }<a name="line.522"></a>
-<span class="sourceLineNo">523</span><a name="line.523"></a>
-<span class="sourceLineNo">524</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>    try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>      return createFileCreator3();<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    } catch (NoSuchMethodException e) {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    return createFileCreator2();<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  }<a name="line.531"></a>
-<span class="sourceLineNo">532</span><a name="line.532"></a>
-<span class="sourceLineNo">533</span>  // cancel the processing if DFSClient is already closed.<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>    private final DFSClient client;<a name="line.536"></a>
-<span class="sourceLineNo">537</span><a name="line.537"></a>
-<span class="sourceLineNo">538</span>    public CancelOnClose(DFSClient client) {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>      this.client = client;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    }<a name="line.540"></a>
-<span class="sourceLineNo">541</span><a name="line.541"></a>
-<span class="sourceLineNo">542</span>    @Override<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    public boolean progress() {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  static {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    try {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      PIPELINE_ACK_STATUS_GETTER = createPipelineAckStatusGetter();<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      STORAGE_TYPE_SETTER = createStorageTypeSetter();<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      BLOCK_ADDER = createBlockAdder();<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      LEASE_MANAGER = createLeaseManager();<a name="line.553"></a>
-<span class="sourceLineNo">554</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      PB_HELPER = createPBHelper();<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      CHECKSUM_CREATER = createChecksumCreater();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>      FILE_CREATOR = createFileCreator();<a name="line.557"></a>
-<span class="sourceLineNo">558</span>    } catch (Exception e) {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.559"></a>
-<span class="sourceLineNo">560</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.560"></a>
-<span class="sourceLineNo">561</span>          "HBASE-16110 for more information.";<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      LOG.error(msg, e);<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      throw new Error(msg, e);<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    }<a name="line.564"></a>
-<span class="sourceLineNo">565</span>  }<a name="line.565"></a>
-<span class="sourceLineNo">566</span><a name="line.566"></a>
-<span class="sourceLineNo">567</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.568"></a>
-<span class="sourceLineNo">569</span>  }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.572"></a>
-<span class="sourceLineNo">573</span>  }<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    return CHECKSUM_CREATER.createChecksum(client);<a name="line.576"></a>
-<span class="sourceLineNo">577</span>  }<a name="line.577"></a>
-<span class="sourceLineNo">578</span><a name="line.578"></a>
-<span class="sourceLineNo">579</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    return PIPELINE_ACK_STATUS_GETTER.get(ack);<a name="line.580"></a>
-<span class="sourceLineNo">581</span>  }<a name="line.581"></a>
-<span class="sourceLineNo">582</span><a name="line.582"></a>
-<span class="sourceLineNo">583</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.583"></a>
-<span class="sourceLineNo">584</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.584"></a>
-<span class="sourceLineNo">585</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      new ProtobufVarint32FrameDecoder(),<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>        @Override<a name="line.590"></a>
-<span class="sourceLineNo">591</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>            throws Exception {<a name="line.592"></a>
-<span class="sourceLineNo">593</span>          Status pipelineStatus = resp.getStatus();<a name="line.593"></a>
-<span class="sourceLineNo">594</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.595"></a>
-<span class="sourceLineNo">596</span>          }<a name="line.596"></a>
-<span class="sourceLineNo">597</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.600"></a>
-<span class="sourceLineNo">601</span>                  resp.getMessage() + ", " + logInfo);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>            } else {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.603"></a>
-<span class="sourceLineNo">604</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>            }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>          }<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          // success<a name="line.607"></a>
-<span class="sourceLineNo">608</span>          ChannelPipeline p = ctx.pipeline();<a name="line.608"></a>
-<span class="sourceLineNo">609</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.610"></a>
-<span class="sourceLineNo">611</span>            // of pipeline.<a name="line.611"></a>
-<span class="sourceLineNo">612</span>            if (handler instanceof IdleStateHandler) {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>              break;<a name="line.613"></a>
-<span class="sourceLineNo">614</span>            }<a name="line.614"></a>
-<span class="sourceLineNo">615</span>          }<a name="line.615"></a>
-<span class="sourceLineNo">616</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.616"></a>
-<span class="sourceLineNo">617</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.617"></a>
-<span class="sourceLineNo">618</span>          ctx.channel().config().setAutoRead(false);<a name="line.618"></a>
-<span class="sourceLineNo">619</span>          promise.trySuccess(ctx.channel());<a name="line.619"></a>
-<span class="sourceLineNo">620</span>        }<a name="line.620"></a>
-<span class="sourceLineNo">621</span><a name="line.621"></a>
-<span class="sourceLineNo">622</span>        @Override<a name="line.622"></a>
-<span class="sourceLineNo">623</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.624"></a>
-<span class="sourceLineNo">625</span>        }<a name="line.625"></a>
-<span class="sourceLineNo">626</span><a name="line.626"></a>
-<span class="sourceLineNo">627</span>        @Override<a name="line.627"></a>
-<span class="sourceLineNo">628</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.628"></a>
-<span class="sourceLineNo">629</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.629"></a>
-<span class="sourceLineNo">630</span>            promise<a name="line.630"></a>
-<span class="sourceLineNo">631</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          } else {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>            super.userEventTriggered(ctx, evt);<a name="line.633"></a>
-<span class="sourceLineNo">634</span>          }<a name="line.634"></a>
-<span class="sourceLineNo">635</span>        }<a name="line.635"></a>
-<span class="sourceLineNo">636</span><a name="line.636"></a>
-<span class="sourceLineNo">637</span>        @Override<a name="line.637"></a>
-<span class="sourceLineNo">638</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.638"></a>
-<span class="sourceLineNo">639</span>          promise.tryFailure(cause);<a name="line.639"></a>
-<span class="sourceLineNo">640</span>        }<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      });<a name="line.641"></a>
-<span class="sourceLineNo">642</span>  }<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>  private static void requestWriteBlock(Channel channel, Enum&lt;?&gt; storageType,<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.645"></a>
-<span class="sourceLineNo">646</span>    OpWriteBlockProto proto = STORAGE_TYPE_SETTER.set(writeBlockProtoBuilder, storageType).build();<a name="line.646"></a>
-<span class="sourceLineNo">647</span>    int protoLen = proto.getSerializedSize();<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    ByteBuf buffer =<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.650"></a>
-<span class="sourceLineNo">651</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.651"></a>
-<span class="sourceLineNo">652</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    channel.writeAndFlush(buffer);<a name="line.653"></a>
-<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.656"></a>
-<span class="sourceLineNo">657</span>      Enum&lt;?&gt; storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      throws IOException {<a name="line.659"></a>
-<span class="sourceLineNo">660</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.662"></a>
-<span class="sourceLineNo">663</span><a name="line.663"></a>
-<span class="sourceLineNo">664</span>      @Override<a name="line.664"></a>
-<span class="sourceLineNo">665</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        if (future.isSuccess()) {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>          // setup response processing pipeline first, then send request.<a name="line.667"></a>
-<span class="sourceLineNo">668</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.669"></a>
-<span class="sourceLineNo">670</span>        } else {<a name="line.670"></a>
-<span class="sourceLineNo">671</span>          promise.tryFailure(future.cause());<a name="line.671"></a>
-<span class="sourceLineNo">672</span>        }<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    });<a name="line.674"></a>
-<span class="sourceLineNo">675</span>  }<a name="line.675"></a>
-<span class="sourceLineNo">676</span><a name="line.676"></a>
-<span class="sourceLineNo">677</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.677"></a>
-<span class="sourceLineNo">678</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.678"></a>
-<span class="sourceLineNo">679</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    Enum&lt;?&gt;[] storageTypes = locatedBlock.getStorageTypes();<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.682"></a>
-<span class="sourceLineNo">683</span>    boolean connectToDnViaHostname =<a name="line.683"></a>
-<span class="sourceLineNo">684</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.688"></a>
-<span class="sourceLineNo">689</span>        .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PB_HELPER.convert(blockCopy))<a name="line.689"></a>
-<span class="sourceLineNo">690</span>            .setToken(PB_HELPER.convert(locatedBlock.getBlockToken())))<a name="line.690"></a>
-<span class="sourceLineNo">691</span>        .setClientName(clientName).build();<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.693"></a>
-<span class="sourceLineNo">694</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.694"></a>
-<span class="sourceLineNo">695</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.695"></a>
-<span class="sourceLineNo">696</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.696"></a>
-<span class="sourceLineNo">697</span>        .setRequestedChecksum(checksumProto)<a name="line.697"></a>
-<span class="sourceLineNo">698</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.700"></a>
-<span class="sourceLineNo">701</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.701"></a>
-<span class="sourceLineNo">702</span>      Enum&lt;?&gt; storageType = storageTypes[i];<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      futureList.add(promise);<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.705"></a>
-<span class="sourceLineNo">706</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.706"></a>
-<span class="sourceLineNo">707</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span><a name="line.708"></a>
-<span class="sourceLineNo">709</span>            @Override<a name="line.709"></a>
-<span class="sourceLineNo">710</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.710"></a>
-<span class="sourceLineNo">711</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.711"></a>
-<span class="sourceLineNo">712</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.712"></a>
-<span class="sourceLineNo">713</span>              // a null handler.<a name="line.713"></a>
-<span class="sourceLineNo">714</span>            }<a name="line.714"></a>
-<span class="sourceLineNo">715</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.715"></a>
-<span class="sourceLineNo">716</span><a name="line.716"></a>
-<span class="sourceLineNo">717</span>            @Override<a name="line.717"></a>
-<span class="sourceLineNo">718</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.718"></a>
-<span class="sourceLineNo">719</span>              if (future.isSuccess()) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.720"></a>
-<span class="sourceLineNo">721</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.721"></a>
-<span class="sourceLineNo">722</span>              } else {<a name="line.722"></a>
-<span class="sourceLineNo">723</span>                promise.tryFailure(future.cause());<a name="line.723"></a>
-<span class="sourceLineNo">724</span>              }<a name="line.724"></a>
-<span class="sourceLineNo">725</span>            }<a name="line.725"></a>
-<span class="sourceLineNo">726</span>          });<a name="line.726"></a>
-<span class="sourceLineNo">727</span>    }<a name="line.727"></a>
-<span class="sourceLineNo">728</span>    return futureList;<a name="line.728"></a>
-<span class="sourceLineNo">729</span>  }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>  /**<a name="line.731"></a>
-<span class="sourceLineNo">732</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.732"></a>
-<span class="sourceLineNo">733</span>   */<a name="line.733"></a>
-<span class="sourceLineNo">734</span>  public static class NameNodeException extends IOException {<a name="line.734"></a>
-<span class="sourceLineNo">735</span><a name="line.735"></a>
-<span class="sourceLineNo">736</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    public NameNodeException(Throwable cause) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      super(cause);<a name="line.739"></a>
-<span class="sourceLineNo">740</span>    }<a name="line.740"></a>
-<span class="sourceLineNo">741</span>  }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.743"></a>
-<span class="sourceLineNo">744</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.744"></a>
-<span class="sourceLineNo">745</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    Configuration conf = dfs.getConf();<a name="line.746"></a>
-<span class="sourceLineNo">747</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    DFSClient client = dfs.getClient();<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    String clientName = client.getClientName();<a name="line.749"></a>
-<span class="sourceLineNo">750</span>    ClientProtocol namenode = client.getNamenode();<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.751"></a>
-<span class="sourceLineNo">752</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.752"></a>
-<span class="sourceLineNo">753</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    for (int retry = 0;; retry++) {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      HdfsFileStatus stat;<a name="line.755"></a>
-<span class="sourceLineNo">756</span>      try {<a name="line.756"></a>
-<span class="sourceLineNo">757</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.757"></a>
-<span class="sourceLineNo">758</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.758"></a>
-<span class="sourceLineNo">759</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.759"></a>
-<span class="sourceLineNo">760</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      } catch (Exception e) {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>        if (e instanceof RemoteException) {<a name="line.762"></a>
-<span class="sourceLineNo">763</span>          throw (RemoteException) e;<a name="line.763"></a>
-<span class="sourceLineNo">764</span>        } else {<a name="line.764"></a>
-<span class="sourceLineNo">765</span>          throw new NameNodeException(e);<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        }<a name="line.766"></a>
-<span class="sourceLineNo">767</span>      }<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      beginFileLease(client, stat.getFileId());<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      boolean succ = false;<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      LocatedBlock locatedBlock = null;<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      try {<a name="line.772"></a>
-<span class="sourceLineNo">773</span>        DataChecksum summer = createChecksum(client);<a name="line.773"></a>
-<span class="sourceLineNo">774</span>        locatedBlock = BLOCK_ADDER.addBlock(namenode, src, client.getClientName(), null,<a name="line.774"></a>
-<span class="sourceLineNo">775</span>          excludesNodes, stat.getFileId(), null);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.776"></a>
-<span class="sourceLineNo">777</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.777"></a>
-<span class="sourceLineNo">778</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.778"></a>
-<span class="sourceLineNo">779</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>          try {<a name="line.780"></a>
-<span class="sourceLineNo">781</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.781"></a>
-<span class="sourceLineNo">782</span>          } catch (Exception e) {<a name="line.782"></a>
-<span class="sourceLineNo">783</span>            // exclude the broken DN next time<a name="line.783"></a>
-<span class="sourceLineNo">784</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.784"></a>
-<span class="sourceLineNo">785</span>            throw e;<a name="line.785"></a>
-<span class="sourceLineNo">786</span>          }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>        }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.788"></a>
-<span class="sourceLineNo">789</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.789"></a>
-<span class="sourceLineNo">790</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.790"></a>
-<span class="sourceLineNo">791</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.791"></a>
-<span class="sourceLineNo">792</span>        succ = true;<a name="line.792"></a>
-<span class="sourceLineNo">793</span>        return output;<a name="line.793"></a>
-<span class="sourceLineNo">794</span>      } catch (RemoteException e) {<a name="line.794"></a>
-<span class="sourceLineNo">795</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.795"></a>
-<span class="sourceLineNo">796</span>        if (shouldRetryCreate(e)) {<a name="line.796"></a>
-<span class="sourceLineNo">797</span>          if (retry &gt;= createMaxRetries) {<a name="line.797"></a>
-<span class="sourceLineNo">798</span>            throw e.unwrapRemoteException();<a name="line.798"></a>
-<span class="sourceLineNo">799</span>          }<a name="line.799"></a>
-<span class="sourceLineNo">800</span>        } else {<a name="line.800"></a>
-<span class="sourceLineNo">801</span>          throw e.unwrapRemoteException();<a name="line.801"></a>
-<span class="sourceLineNo">802</span>        }<a name="line.802"></a>
-<span class="sourceLineNo">803</span>      } catch (IOException e) {<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        if (retry &gt;= createMaxRetries) {<a name="line.805"></a>
-<span class="sourceLineNo">806</span>          throw e;<a name="line.806"></a>
-<span class="sourceLineNo">807</span>        }<a name="line.807"></a>
-<span class="sourceLineNo">808</span>        // overwrite the old broken file.<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        overwrite = true;<a name="line.809"></a>
-<span class="sourceLineNo">810</span>        try {<a name="line.810"></a>
-<span class="sourceLineNo">811</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.811"></a>
-<span class="sourceLineNo">812</span>        } catch (InterruptedException ie) {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>          throw new InterruptedIOException();<a name="line.813"></a>
-<span class="sourceLineNo">814</span>        }<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      } finally {<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        if (!succ) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>          if (futureList != null) {<a name="line.817"></a>
-<span class="sourceLineNo">818</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.818"></a>
-<span class="sourceLineNo">819</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.819"></a>
-<span class="sourceLineNo">820</span><a name="line.820"></a>
-<span class="sourceLineNo">821</span>                @Override<a name="line.821"></a>
-<span class="sourceLineNo">822</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>                  if (future.isSuccess()) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>                    future.getNow().close();<a name="line.824"></a>
-<span class="sourceLineNo">825</span>                  }<a name="line.825"></a>
-<span class="sourceLineNo">826</span>                }<a name="line.826"></a>
-<span class="sourceLineNo">827</span>              });<a name="line.827"></a>
-<span class="sourceLineNo">828</span>            }<a name="line.828"></a>
-<span class="sourceLineNo">829</span>          }<a name="line.829"></a>
-<span class="sourceLineNo">830</span>          endFileLease(client, stat.getFileId());<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        }<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      }<a name="line.832"></a>
-<span class="sourceLineNo">833</span>    }<a name="line.833"></a>
-<span class="sourceLineNo">834</span>  }<a name="line.834"></a>
-<span class="sourceLineNo">835</span><a name="line.835"></a>
-<span class="sourceLineNo">836</span>  /**<a name="line.836"></a>
-<span class="sourceLineNo">837</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.837"></a>
-<span class="sourceLineNo">838</span>   * inside an {@link EventLoop}.<a name="line.838"></a>
-<span class="sourceLineNo">839</span>   */<a name="line.839"></a>
-<span class="sourceLineNo">840</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.840"></a>
-<span class="sourceLineNo">841</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.841"></a>
-<span class="sourceLineNo">842</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.842"></a>
-<span class="sourceLineNo">843</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.843"></a>
-<span class="sourceLineNo">844</span><a name="line.844"></a>
-<span class="sourceLineNo">845</span>      @Override<a name="line.845"></a>
-<span class="sourceLineNo">846</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.846"></a>
-<span class="sourceLineNo">847</span>          throws IOException, UnresolvedLinkException {<a name="line.847"></a>
-<span class="sourceLineNo">848</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          blockSize, eventLoopGroup, channelClass);<a name="line.849"></a>
-<span class="sourceLineNo">850</span>      }<a name="line.850"></a>
-<span class="sourceLineNo">851</span><a name="line.851"></a>
-<span class="sourceLineNo">852</span>      @Override<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.853"></a>
-<span class="sourceLineNo">854</span>        throw new UnsupportedOperationException();<a name="line.854"></a>
-<span class="sourceLineNo">855</span>      }<a name="line.855"></a>
-<span class="sourceLineNo">856</span>    }.resolve(dfs, f);<a name="line.856"></a>
-<span class="sourceLineNo">857</span>  }<a name="line.857"></a>
-<span class="sourceLineNo">858</span><a name="line.858"></a>
-<span class="sourceLineNo">859</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.861"></a>
-<span class="sourceLineNo">862</span>    // DFSOutputStream.newStreamForCreate.<a name="line.862"></a>
-<span class="sourceLineNo">863</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.863"></a>
-<span class="sourceLineNo">864</span>  }<a name="line.864"></a>
-<span class="sourceLineNo">865</span><a name="line.865"></a>
-<span class="sourceLineNo">866</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.866"></a>
-<span class="sourceLineNo">867</span>      ExtendedBlock block, long fileId) {<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    for (int retry = 0;; retry++) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      try {<a name="line.869"></a>
-<span class="sourceLineNo">870</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>          endFileLease(client, fileId);<a name="line.871"></a>
-<span class="sourceLineNo">872</span>          return;<a name="line.872"></a>
-<span class="sourceLineNo">873</span>        } else {<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>      } catch (RemoteException e) {<a name="line.876"></a>
-<span class="sourceLineNo">877</span>        IOException ioe = e.unwrapRemoteException();<a name="line.877"></a>
-<span class="sourceLineNo">878</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.879"></a>
-<span class="sourceLineNo">880</span>          return;<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        } else {<a name="line.881"></a>
-<span class="sourceLineNo">882</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.882"></a>
-<span class="sourceLineNo">883</span>        }<a name="line.883"></a>
-<span class="sourceLineNo">884</span>      } catch (Exception e) {<a name="line.884"></a>
-<span class="sourceLineNo">885</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.885"></a>
-<span class="sourceLineNo">886</span>      }<a name="line.886"></a>
-<span class="sourceLineNo">887</span>      sleepIgnoreInterrupt(retry);<a name="line.887"></a>
-<span class="sourceLineNo">888</span>    }<a name="line.888"></a>
-<span class="sourceLineNo">889</span>  }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.891"></a>
-<span class="sourceLineNo">892</span>    try {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.893"></a>
-<span class="sourceLineNo">894</span>    } catch (InterruptedException e) {<a name="line.894"></a>
-<span class="sourceLineNo">895</span>    }<a name="line.895"></a>
-<span class="sourceLineNo">896</span>  }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>}<a name="line.897"></a>
-</pre>
-</div>
-</body>
-</html>
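
The removed source above builds small adapter interfaces (BlockAdder, PBHelper, ChecksumCreater, FileCreator) over HDFS client internals whose names and signatures differ across Hadoop releases, resolving the right variant once by reflection in a static initializer. Below is a minimal, self-contained sketch of that probe-and-wrap pattern; the Greeter, NewApi and LegacyApi names are hypothetical stand-ins introduced for illustration, not HBase or HDFS types.

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

public final class ReflectionAdapterSketch {

  // Adapter analogous to BlockAdder/FileCreator in the removed source: one method,
  // implemented differently depending on which target signature the runtime provides.
  interface Greeter {
    String greet(String name);
  }

  // Hypothetical stand-in for the "newer" API shape (extra trailing parameter).
  public static class NewApi {
    public String hello(String name, String suffix) {
      return "hello " + name + (suffix == null ? "" : suffix);
    }
  }

  // Hypothetical stand-in for the "older" API shape.
  public static class LegacyApi {
    public String hello(String name) {
      return "hello " + name;
    }
  }

  // Probe the target once, inspect the parameter list, and return a matching adapter,
  // mirroring the parameter-type check in createBlockAdder().
  static Greeter createGreeter(Object target) throws NoSuchMethodException {
    for (Method method : target.getClass().getMethods()) {
      if (!method.getName().equals("hello")) {
        continue;
      }
      Method helloMethod = method;
      boolean hasSuffixParam = helloMethod.getParameterTypes().length == 2;
      return name -> {
        try {
          return (String) (hasSuffixParam ? helloMethod.invoke(target, name, null)
              : helloMethod.invoke(target, name));
        } catch (IllegalAccessException | InvocationTargetException e) {
          // Same strategy as the removed code: surface reflective failures as unchecked.
          throw new RuntimeException(e);
        }
      };
    }
    throw new NoSuchMethodException("Cannot find hello method in " + target.getClass());
  }

  public static void main(String[] args) throws Exception {
    System.out.println(createGreeter(new NewApi()).greet("hbase"));    // newer signature
    System.out.println(createGreeter(new LegacyApi()).greet("hbase")); // older signature
  }
}

Resolving the reflective Method once and caching the adapter keeps the per-call cost to a single Method.invoke, which is the same trade-off the removed static initializer makes when it fails fast with an Error if any probe does not match.
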
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
index eb6a26e..6684af5 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
@@ -29,9 +29,9 @@
 <span class="sourceLineNo">021</span>import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;<a name="line.21"></a>
 <span class="sourceLineNo">022</span>import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createEncryptor;<a name="line.22"></a>
 <span class="sourceLineNo">023</span>import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.trySaslNegotiate;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;<a name="line.26"></a>
+<span class="sourceLineNo">024</span>import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import static org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage.PIPELINE_SETUP_CREATE;<a name="line.27"></a>
 <span class="sourceLineNo">028</span>import static org.apache.hbase.thirdparty.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;<a name="line.28"></a>
 <span class="sourceLineNo">029</span>import static org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE;<a name="line.29"></a>
@@ -53,856 +53,584 @@
 <span class="sourceLineNo">045</span>import org.apache.hadoop.fs.FileSystem;<a name="line.45"></a>
 <span class="sourceLineNo">046</span>import org.apache.hadoop.fs.FileSystemLinkResolver;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import org.apache.hadoop.fs.Path;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.fs.UnresolvedLinkException;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hdfs.DFSOutputStream;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hdfs.protocol.ClientProtocol;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hdfs.protocol.DatanodeInfo;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hdfs.protocol.ExtendedBlock;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hdfs.protocol.LocatedBlock;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hdfs.protocol.datatransfer.Op;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.io.EnumSetWritable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.net.NetUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;<a name="line.82"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.fs.StorageType;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.fs.UnresolvedLinkException;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hdfs.DFSOutputStream;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hdfs.protocol.ClientProtocol;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hdfs.protocol.DatanodeInfo;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hdfs.protocol.ExtendedBlock;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hdfs.protocol.LocatedBlock;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hdfs.protocol.datatransfer.Op;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.ECN;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.io.EnumSetWritable;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.net.NetUtils;<a name="line.82"></a>
 <span class="sourceLineNo">083</span>import org.apache.hadoop.security.token.Token;<a name="line.83"></a>
 <span class="sourceLineNo">084</span>import org.apache.hadoop.util.DataChecksum;<a name="line.84"></a>
 <span class="sourceLineNo">085</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.85"></a>
 <span class="sourceLineNo">086</span>import org.slf4j.Logger;<a name="line.86"></a>
 <span class="sourceLineNo">087</span>import org.slf4j.LoggerFactory;<a name="line.87"></a>
 <span class="sourceLineNo">088</span><a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.io.netty.bootstrap.Bootstrap;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufAllocator;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufOutputStream;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.io.netty.buffer.PooledByteBufAllocator;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.io.netty.channel.Channel;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelFuture;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelFutureListener;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandler;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelInitializer;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelPipeline;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hbase.thirdparty.io.netty.channel.EventLoop;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufDecoder;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateEvent;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateHandler;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.Future;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.FutureListener;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise;<a name="line.112"></a>
-<span class="sourceLineNo">113</span><a name="line.113"></a>
-<span class="sourceLineNo">114</span>/**<a name="line.114"></a>
-<span class="sourceLineNo">115</span> * Helper class for implementing {@link FanOutOneBlockAsyncDFSOutput}.<a name="line.115"></a>
-<span class="sourceLineNo">116</span> */<a name="line.116"></a>
-<span class="sourceLineNo">117</span>@InterfaceAudience.Private<a name="line.117"></a>
-<span class="sourceLineNo">118</span>public final class FanOutOneBlockAsyncDFSOutputHelper {<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  private static final Logger LOG =<a name="line.119"></a>
-<span class="sourceLineNo">120</span>      LoggerFactory.getLogger(FanOutOneBlockAsyncDFSOutputHelper.class);<a name="line.120"></a>
-<span class="sourceLineNo">121</span><a name="line.121"></a>
-<span class="sourceLineNo">122</span>  private FanOutOneBlockAsyncDFSOutputHelper() {<a name="line.122"></a>
-<span class="sourceLineNo">123</span>  }<a name="line.123"></a>
+<span class="sourceLineNo">089</span>import org.apache.hbase.thirdparty.io.netty.bootstrap.Bootstrap;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufAllocator;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufOutputStream;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.io.netty.buffer.PooledByteBufAllocator;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.io.netty.channel.Channel;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelFuture;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelFutureListener;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandler;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelInitializer;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelPipeline;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hbase.thirdparty.io.netty.channel.EventLoop;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufDecoder;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateEvent;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateHandler;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.Future;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.FutureListener;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise;<a name="line.110"></a>
+<span class="sourceLineNo">111</span><a name="line.111"></a>
+<span class="sourceLineNo">112</span>/**<a name="line.112"></a>
+<span class="sourceLineNo">113</span> * Helper class for implementing {@link FanOutOneBlockAsyncDFSOutput}.<a name="line.113"></a>
+<span class="sourceLineNo">114</span> */<a name="line.114"></a>
+<span class="sourceLineNo">115</span>@InterfaceAudience.Private<a name="line.115"></a>
+<span class="sourceLineNo">116</span>public final class FanOutOneBlockAsyncDFSOutputHelper {<a name="line.116"></a>
+<span class="sourceLineNo">117</span>  private static final Logger LOG =<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      LoggerFactory.getLogger(FanOutOneBlockAsyncDFSOutputHelper.class);<a name="line.118"></a>
+<span class="sourceLineNo">119</span><a name="line.119"></a>
+<span class="sourceLineNo">120</span>  private FanOutOneBlockAsyncDFSOutputHelper() {<a name="line.120"></a>
+<span class="sourceLineNo">121</span>  }<a name="line.121"></a>
+<span class="sourceLineNo">122</span><a name="line.122"></a>
+<span class="sourceLineNo">123</span>  public static final String ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES = "hbase.fs.async.create.retries";<a name="line.123"></a>
 <span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>  public static final String ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES = "hbase.fs.async.create.retries";<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  public static final int DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES = 10;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  // use pooled allocator for performance.<a name="line.128"></a>
-<span class="sourceLineNo">129</span>  private static final ByteBufAllocator ALLOC = PooledByteBufAllocator.DEFAULT;<a name="line.129"></a>
-<span class="sourceLineNo">130</span><a name="line.130"></a>
-<span class="sourceLineNo">131</span>  // copied from DFSPacket since it is package private.<a name="line.131"></a>
-<span class="sourceLineNo">132</span>  public static final long HEART_BEAT_SEQNO = -1L;<a name="line.132"></a>
-<span class="sourceLineNo">133</span><a name="line.133"></a>
-<span class="sourceLineNo">134</span>  // Timeouts for communicating with DataNode for streaming writes/reads<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  public static final int READ_TIMEOUT = 60 * 1000;<a name="line.135"></a>
+<span class="sourceLineNo">125</span>  public static final int DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES = 10;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  // use pooled allocator for performance.<a name="line.126"></a>
+<span class="sourceLineNo">127</span>  private static final ByteBufAllocator ALLOC = PooledByteBufAllocator.DEFAULT;<a name="line.127"></a>
+<span class="sourceLineNo">128</span><a name="line.128"></a>
+<span class="sourceLineNo">129</span>  // copied from DFSPacket since it is package private.<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  public static final long HEART_BEAT_SEQNO = -1L;<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  // Timeouts for communicating with DataNode for streaming writes/reads<a name="line.132"></a>
+<span class="sourceLineNo">133</span>  public static final int READ_TIMEOUT = 60 * 1000;<a name="line.133"></a>
+<span class="sourceLineNo">134</span><a name="line.134"></a>
+<span class="sourceLineNo">135</span>  private static final DatanodeInfo[] EMPTY_DN_ARRAY = new DatanodeInfo[0];<a name="line.135"></a>
 <span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>  private static final DatanodeInfo[] EMPTY_DN_ARRAY = new DatanodeInfo[0];<a name="line.137"></a>
+<span class="sourceLineNo">137</span>  private interface LeaseManager {<a name="line.137"></a>
 <span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>  // helper class for getting Status from PipelineAckProto. In hadoop 2.6 or before, there is a<a name="line.139"></a>
-<span class="sourceLineNo">140</span>  // getStatus method, and for hadoop 2.7 or after, the status is retrieved from flag. The flag may<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  // get from proto directly, or combined by the reply field of the proto and a ECN object. See<a name="line.141"></a>
-<span class="sourceLineNo">142</span>  // createPipelineAckStatusGetter for more details.<a name="line.142"></a>
-<span class="sourceLineNo">143</span>  private interface PipelineAckStatusGetter {<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    Status get(PipelineAckProto ack);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  private static final PipelineAckStatusGetter PIPELINE_ACK_STATUS_GETTER;<a name="line.147"></a>
-<span class="sourceLineNo">148</span><a name="line.148"></a>
-<span class="sourceLineNo">149</span>  // StorageType enum is placed under o.a.h.hdfs in hadoop 2.6 and o.a.h.fs in hadoop 2.7. So here<a name="line.149"></a>
-<span class="sourceLineNo">150</span>  // we need to use reflection to set it.See createStorageTypeSetter for more details.<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  private interface StorageTypeSetter {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    OpWriteBlockProto.Builder set(OpWriteBlockProto.Builder builder, Enum&lt;?&gt; storageType);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
+<span class="sourceLineNo">139</span>    void begin(DFSClient client, long inodeId);<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>    void end(DFSClient client, long inodeId);<a name="line.141"></a>
+<span class="sourceLineNo">142</span>  }<a name="line.142"></a>
+<span class="sourceLineNo">143</span><a name="line.143"></a>
+<span class="sourceLineNo">144</span>  private static final LeaseManager LEASE_MANAGER;<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span>  // This is used to terminate a recoverFileLease call when FileSystem is already closed.<a name="line.146"></a>
+<span class="sourceLineNo">147</span>  // isClientRunning is not public so we need to use reflection.<a name="line.147"></a>
+<span class="sourceLineNo">148</span>  private interface DFSClientAdaptor {<a name="line.148"></a>
+<span class="sourceLineNo">149</span><a name="line.149"></a>
+<span class="sourceLineNo">150</span>    boolean isClientRunning(DFSClient client);<a name="line.150"></a>
+<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
+<span class="sourceLineNo">152</span><a name="line.152"></a>
+<span class="sourceLineNo">153</span>  private static final DFSClientAdaptor DFS_CLIENT_ADAPTOR;<a name="line.153"></a>
 <span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>  private static final StorageTypeSetter STORAGE_TYPE_SETTER;<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>  // helper class for calling add block method on namenode. There is a addBlockFlags parameter for<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  // hadoop 2.8 or later. See createBlockAdder for more details.<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  private interface BlockAdder {<a name="line.159"></a>
-<span class="sourceLineNo">160</span><a name="line.160"></a>
-<span class="sourceLineNo">161</span>    LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,<a name="line.161"></a>
-<span class="sourceLineNo">162</span>        ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId, String[] favoredNodes)<a name="line.162"></a>
-<span class="sourceLineNo">163</span>        throws IOException;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  }<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>  private static final BlockAdder BLOCK_ADDER;<a name="line.166"></a>
-<span class="sourceLineNo">167</span><a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private interface LeaseManager {<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>    void begin(DFSClient client, long inodeId);<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>    void end(DFSClient client, long inodeId);<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>  private static final LeaseManager LEASE_MANAGER;<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>  // This is used to terminate a recoverFileLease call when FileSystem is already closed.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  // isClientRunning is not public so we need to use reflection.<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  private interface DFSClientAdaptor {<a name="line.179"></a>
-<span class="sourceLineNo">180</span><a name="line.180"></a>
-<span class="sourceLineNo">181</span>    boolean isClientRunning(DFSClient client);<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  }<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>  private static final DFSClientAdaptor DFS_CLIENT_ADAPTOR;<a name="line.184"></a>
-<span class="sourceLineNo">185</span><a name="line.185"></a>
-<span class="sourceLineNo">186</span>  // helper class for convert protos.<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  private interface PBHelper {<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>    ExtendedBlockProto convert(ExtendedBlock b);<a name="line.189"></a>
-<span class="sourceLineNo">190</span><a name="line.190"></a>
-<span class="sourceLineNo">191</span>    TokenProto convert(Token&lt;?&gt; tok);<a name="line.191"></a>
-<span class="sourceLineNo">192</span>  }<a name="line.192"></a>
-<span class="sourceLineNo">193</span><a name="line.193"></a>
-<span class="sourceLineNo">194</span>  private static final PBHelper PB_HELPER;<a name="line.194"></a>
+<span class="sourceLineNo">155</span>  // helper class for creating files.<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  private interface FileCreator {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    default HdfsFileStatus create(ClientProtocol instance, String src, FsPermission masked,<a name="line.157"></a>
+<span class="sourceLineNo">158</span>        String clientName, EnumSetWritable&lt;CreateFlag&gt; flag, boolean createParent,<a name="line.158"></a>
+<span class="sourceLineNo">159</span>        short replication, long blockSize, CryptoProtocolVersion[] supportedVersions)<a name="line.159"></a>
+<span class="sourceLineNo">160</span>        throws Exception {<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      try {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>        return (HdfsFileStatus) createObject(instance, src, masked, clientName, flag, createParent,<a name="line.162"></a>
+<span class="sourceLineNo">163</span>          replication, blockSize, supportedVersions);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      } catch (InvocationTargetException e) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>        if (e.getCause() instanceof Exception) {<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          throw (Exception) e.getCause();<a name="line.166"></a>
+<span class="sourceLineNo">167</span>        } else {<a name="line.167"></a>
+<span class="sourceLineNo">168</span>          throw new RuntimeException(e.getCause());<a name="line.168"></a>
+<span class="sourceLineNo">169</span>        }<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      }<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    }<a name="line.171"></a>
+<span class="sourceLineNo">172</span><a name="line.172"></a>
+<span class="sourceLineNo">173</span>    Object createObject(ClientProtocol instance, String src, FsPermission masked, String clientName,<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        EnumSetWritable&lt;CreateFlag&gt; flag, boolean createParent, short replication, long blockSize,<a name="line.174"></a>
+<span class="sourceLineNo">175</span>        CryptoProtocolVersion[] supportedVersions) throws Exception;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  }<a name="line.176"></a>
+<span class="sourceLineNo">177</span><a name="line.177"></a>
+<span class="sourceLineNo">178</span>  private static final FileCreator FILE_CREATOR;<a name="line.178"></a>
+<span class="sourceLineNo">179</span><a name="line.179"></a>
+<span class="sourceLineNo">180</span>  private static DFSClientAdaptor createDFSClientAdaptor() throws NoSuchMethodException {<a name="line.180"></a>
+<span class="sourceLineNo">181</span>    Method isClientRunningMethod = DFSClient.class.getDeclaredMethod("isClientRunning");<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    isClientRunningMethod.setAccessible(true);<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    return new DFSClientAdaptor() {<a name="line.183"></a>
+<span class="sourceLineNo">184</span><a name="line.184"></a>
+<span class="sourceLineNo">185</span>      @Override<a name="line.185"></a>
+<span class="sourceLineNo">186</span>      public boolean isClientRunning(DFSClient client) {<a name="line.186"></a>
+<span class="sourceLineNo">187</span>        try {<a name="line.187"></a>
+<span class="sourceLineNo">188</span>          return (Boolean) isClientRunningMethod.invoke(client);<a name="line.188"></a>
+<span class="sourceLineNo">189</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.189"></a>
+<span class="sourceLineNo">190</span>          throw new RuntimeException(e);<a name="line.190"></a>
+<span class="sourceLineNo">191</span>        }<a name="line.191"></a>
+<span class="sourceLineNo">192</span>      }<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    };<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  }<a name="line.194"></a>
 <span class="sourceLineNo">195</span><a name="line.195"></a>
-<span class="sourceLineNo">196</span>  // helper class for creating data checksum.<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private interface ChecksumCreater {<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    DataChecksum createChecksum(DFSClient client);<a name="line.198"></a>
-<span class="sourceLineNo">199</span>  }<a name="line.199"></a>
-<span class="sourceLineNo">200</span><a name="line.200"></a>
-<span class="sourceLineNo">201</span>  private static final ChecksumCreater CHECKSUM_CREATER;<a name="line.201"></a>
-<span class="sourceLineNo">202</span><a name="line.202"></a>
-<span class="sourceLineNo">203</span>  // helper class for creating files.<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  private interface FileCreator {<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    default HdfsFileStatus create(ClientProtocol instance, String src, FsPermission masked,<a name="line.205"></a>
-<span class="sourceLineNo">206</span>        String clientName, EnumSetWritable&lt;CreateFlag&gt; flag, boolean createParent,<a name="line.206"></a>
-<span class="sourceLineNo">207</span>        short replication, long blockSize, CryptoProtocolVersion[] supportedVersions)<a name="line.207"></a>
-<span class="sourceLineNo">208</span>        throws Exception {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      try {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        return (HdfsFileStatus) createObject(instance, src, masked, clientName, flag, createParent,<a name="line.210"></a>
-<span class="sourceLineNo">211</span>          replication, blockSize, supportedVersions);<a name="line.211"></a>
-<span class="sourceLineNo">212</span>      } catch (InvocationTargetException e) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        if (e.getCause() instanceof Exception) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>          throw (Exception) e.getCause();<a name="line.214"></a>
-<span class="sourceLineNo">215</span>        } else {<a name="line.215"></a>
-<span class="sourceLineNo">216</span>          throw new RuntimeException(e.getCause());<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        }<a name="line.217"></a>
-<span class="sourceLineNo">218</span>      }<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    }<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>    Object createObject(ClientProtocol instance, String src, FsPermission masked, String clientName,<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        EnumSetWritable&lt;CreateFlag&gt; flag, boolean createParent, short replication, long blockSize,<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        CryptoProtocolVersion[] supportedVersions) throws Exception;<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  }<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>  private static final FileCreator FILE_CREATOR;<a name="line.226"></a>
-<span class="sourceLineNo">227</span><a name="line.227"></a>
-<span class="sourceLineNo">228</span>  private static DFSClientAdaptor createDFSClientAdaptor() throws NoSuchMethodException {<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    Method isClientRunningMethod = DFSClient.class.getDeclaredMethod("isClientRunning");<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    isClientRunningMethod.setAccessible(true);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    return new DFSClientAdaptor() {<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>      @Override<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      public boolean isClientRunning(DFSClient client) {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        try {<a name="line.235"></a>
-<span class="sourceLineNo">236</span>          return (Boolean) isClientRunningMethod.invoke(client);<a name="line.236"></a>
-<span class="sourceLineNo">237</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.237"></a>
-<span class="sourceLineNo">238</span>          throw new RuntimeException(e);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>        }<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      }<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    };<a name="line.241"></a>
-<span class="sourceLineNo">242</span>  }<a name="line.242"></a>
-<span class="sourceLineNo">243</span><a name="line.243"></a>
-<span class="sourceLineNo">244</span>  private static LeaseManager createLeaseManager() throws NoSuchMethodException {<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    Method beginFileLeaseMethod =<a name="line.245"></a>
-<span class="sourceLineNo">246</span>        DFSClient.class.getDeclaredMethod("beginFileLease", long.class, DFSOutputStream.class);<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    beginFileLeaseMethod.setAccessible(true);<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    Method endFileLeaseMethod = DFSClient.class.getDeclaredMethod("endFileLease", long.class);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    endFileLeaseMethod.setAccessible(true);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    return new LeaseManager() {<a name="line.250"></a>
-<span class="sourceLineNo">251</span><a name="line.251"></a>
-<span class="sourceLineNo">252</span>      @Override<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      public void begin(DFSClient client, long inodeId) {<a name="line.253"></a>
-<span class="sourceLineNo">254</span>        try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          beginFileLeaseMethod.invoke(client, inodeId, null);<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          throw new RuntimeException(e);<a name="line.257"></a>
-<span class="sourceLineNo">258</span>        }<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      }<a name="line.259"></a>
-<span class="sourceLineNo">260</span><a name="line.260"></a>
-<span class="sourceLineNo">261</span>      @Override<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      public void end(DFSClient client, long inodeId) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>        try {<a name="line.263"></a>
-<span class="sourceLineNo">264</span>          endFileLeaseMethod.invoke(client, inodeId);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>          throw new RuntimeException(e);<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        }<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      }<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    };<a name="line.269"></a>
+<span class="sourceLineNo">196</span>  private static LeaseManager createLeaseManager() throws NoSuchMethodException {<a name="line.196"></a>
+<span class="sourceLineNo">197</span>    Method beginFileLeaseMethod =<a name="line.197"></a>
+<span class="sourceLineNo">198</span>        DFSClient.class.getDeclaredMethod("beginFileLease", long.class, DFSOutputStream.class);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    beginFileLeaseMethod.setAccessible(true);<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    Method endFileLeaseMethod = DFSClient.class.getDeclaredMethod("endFileLease", long.class);<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    endFileLeaseMethod.setAccessible(true);<a name="line.201"></a>
+<span class="sourceLineNo">202</span>    return new LeaseManager() {<a name="line.202"></a>
+<span class="sourceLineNo">203</span><a name="line.203"></a>
+<span class="sourceLineNo">204</span>      @Override<a name="line.204"></a>
+<span class="sourceLineNo">205</span>      public void begin(DFSClient client, long inodeId) {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>        try {<a name="line.206"></a>
+<span class="sourceLineNo">207</span>          beginFileLeaseMethod.invoke(client, inodeId, null);<a name="line.207"></a>
+<span class="sourceLineNo">208</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>          throw new RuntimeException(e);<a name="line.209"></a>
+<span class="sourceLineNo">210</span>        }<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      }<a name="line.211"></a>
+<span class="sourceLineNo">212</span><a name="line.212"></a>
+<span class="sourceLineNo">213</span>      @Override<a name="line.213"></a>
+<span class="sourceLineNo">214</span>      public void end(DFSClient client, long inodeId) {<a name="line.214"></a>
+<span class="sourceLineNo">215</span>        try {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>          endFileLeaseMethod.invoke(client, inodeId);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.217"></a>
+<span class="sourceLineNo">218</span>          throw new RuntimeException(e);<a name="line.218"></a>
+<span class="sourceLineNo">219</span>        }<a name="line.219"></a>
+<span class="sourceLineNo">220</span>      }<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    };<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  }<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.225"></a>
+<span class="sourceLineNo">226</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>      CryptoProtocolVersion[].class, String.class);<a name="line.227"></a>
+<span class="sourceLineNo">228</span><a name="line.228"></a>
+<span class="sourceLineNo">229</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.229"></a>
+<span class="sourceLineNo">230</span>        supportedVersions) -&gt; {<a name="line.230"></a>
+<span class="sourceLineNo">231</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.231"></a>
+<span class="sourceLineNo">232</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    };<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  }<a name="line.234"></a>
+<span class="sourceLineNo">235</span><a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.237"></a>
+<span class="sourceLineNo">238</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      CryptoProtocolVersion[].class);<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.241"></a>
+<span class="sourceLineNo">242</span>        supportedVersions) -&gt; {<a name="line.242"></a>
+<span class="sourceLineNo">243</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.243"></a>
+<span class="sourceLineNo">244</span>        createParent, replication, blockSize, supportedVersions);<a name="line.244"></a>
+<span class="sourceLineNo">245</span>    };<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  }<a name="line.246"></a>
+<span class="sourceLineNo">247</span><a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    try {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      return createFileCreator3();<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    } catch (NoSuchMethodException e) {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    }<a name="line.253"></a>
+<span class="sourceLineNo">254</span>    return createFileCreator2();<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  }<a name="line.255"></a>
+<span class="sourceLineNo">256</span><a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // cancel the processing if DFSClient is already closed.<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.258"></a>
+<span class="sourceLineNo">259</span><a name="line.259"></a>
+<span class="sourceLineNo">260</span>    private final DFSClient client;<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>    public CancelOnClose(DFSClient client) {<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      this.client = client;<a name="line.263"></a>
+<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
+<span class="sourceLineNo">265</span><a name="line.265"></a>
+<span class="sourceLineNo">266</span>    @Override<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    public boolean progress() {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
 <span class="sourceLineNo">270</span>  }<a name="line.270"></a>
 <span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private static PipelineAckStatusGetter createPipelineAckStatusGetter27()<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      throws NoSuchMethodException {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    Method getFlagListMethod = PipelineAckProto.class.getMethod("getFlagList");<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    @SuppressWarnings("rawtypes")<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    Class&lt;? extends Enum&gt; ecnClass;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    try {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      ecnClass = Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck$ECN")<a name="line.278"></a>
-<span class="sourceLineNo">279</span>          .asSubclass(Enum.class);<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    } catch (ClassNotFoundException e) {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      String msg = "Couldn't properly initialize the PipelineAck.ECN class. Please " +<a name="line.281"></a>
-<span class="sourceLineNo">282</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.282"></a>
-<span class="sourceLineNo">283</span>          "HBASE-16110 for more information.";<a name="line.283"></a>
-<span class="sourceLineNo">284</span>      LOG.error(msg, e);<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      throw new Error(msg, e);<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    @SuppressWarnings("unchecked")<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    Enum&lt;?&gt; disabledECN = Enum.valueOf(ecnClass, "DISABLED");<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    Method getReplyMethod = PipelineAckProto.class.getMethod("getReply", int.class);<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    Method combineHeaderMethod =<a name="line.290"></a>
-<span class="sourceLineNo">291</span>        PipelineAck.class.getMethod("combineHeader", ecnClass, Status.class);<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    Method getStatusFromHeaderMethod =<a name="line.292"></a>
-<span class="sourceLineNo">293</span>        PipelineAck.class.getMethod("getStatusFromHeader", int.class);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return new PipelineAckStatusGetter() {<a name="line.294"></a>
-<span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>      @Override<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      public Status get(PipelineAckProto ack) {<a name="line.297"></a>
-<span class="sourceLineNo">298</span>        try {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>          @SuppressWarnings("unchecked")<a name="line.299"></a>
-<span class="sourceLineNo">300</span>          List&lt;Integer&gt; flagList = (List&lt;Integer&gt;) getFlagListMethod.invoke(ack);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>          Integer headerFlag;<a name="line.301"></a>
-<span class="sourceLineNo">302</span>          if (flagList.isEmpty()) {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>            Status reply = (Status) getReplyMethod.invoke(ack, 0);<a name="line.303"></a>
-<span class="sourceLineNo">304</span>            headerFlag = (Integer) combineHeaderMethod.invoke(null, disabledECN, reply);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>          } else {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>            headerFlag = flagList.get(0);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>          }<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          return (Status) getStatusFromHeaderMethod.invoke(null, headerFlag);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          throw new RuntimeException(e);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>        }<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    };<a name="line.313"></a>
-<span class="sourceLineNo">314</span>  }<a name="line.314"></a>
-<span class="sourceLineNo">315</span><a name="line.315"></a>
-<span class="sourceLineNo">316</span>  private static PipelineAckStatusGetter createPipelineAckStatusGetter26()<a name="line.316"></a>
-<span class="sourceLineNo">317</span>      throws NoSuchMethodException {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    Method getStatusMethod = PipelineAckProto.class.getMethod("getStatus", int.class);<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    return new PipelineAckStatusGetter() {<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>      @Override<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      public Status get(PipelineAckProto ack) {<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        try {<a name="line.323"></a>
-<span class="sourceLineNo">324</span>          return (Status) getStatusMethod.invoke(ack, 0);<a name="line.324"></a>
-<span class="sourceLineNo">325</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>          throw new RuntimeException(e);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>        }<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      }<a name="line.328"></a>
-<span class="sourceLineNo">329</span>    };<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  }<a name="line.330"></a>
-<span class="sourceLineNo">331</span><a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private static PipelineAckStatusGetter createPipelineAckStatusGetter()<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      throws NoSuchMethodException {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    try {<a name="line.334"></a>
-<span class="sourceLineNo">335</span>      return createPipelineAckStatusGetter27();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    } catch (NoSuchMethodException e) {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>      LOG.debug("Can not get expected method " + e.getMessage() +<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          ", this usually because your Hadoop is pre 2.7.0, " +<a name="line.338"></a>
-<span class="sourceLineNo">339</span>          "try the methods in Hadoop 2.6.x instead.");<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    }<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    return createPipelineAckStatusGetter26();<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  }<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private static StorageTypeSetter createStorageTypeSetter() throws NoSuchMethodException {<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    Method setStorageTypeMethod =<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        OpWriteBlockProto.Builder.class.getMethod("setStorageType", StorageTypeProto.class);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    ImmutableMap.Builder&lt;String, StorageTypeProto&gt; builder = ImmutableMap.builder();<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    for (StorageTypeProto storageTypeProto : StorageTypeProto.values()) {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      builder.put(storageTypeProto.name(), storageTypeProto);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    }<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    ImmutableMap&lt;String, StorageTypeProto&gt; name2ProtoEnum = builder.build();<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    return new StorageTypeSetter() {<a name="line.352"></a>
+<span class="sourceLineNo">272</span>  static {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    try {<a name="line.273"></a>
+<span class="sourceLineNo">274</span>      LEASE_MANAGER = createLeaseManager();<a name="line.274"></a>
+<span class="sourceLineNo">275</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      FILE_CREATOR = createFileCreator();<a name="line.276"></a>
+<span class="sourceLineNo">277</span>    } catch (Exception e) {<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.278"></a>
+<span class="sourceLineNo">279</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.279"></a>
+<span class="sourceLineNo">280</span>          "HBASE-16110 for more information.";<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      LOG.error(msg, e);<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      throw new Error(msg, e);<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  }<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  }<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    return client.getConf().createChecksum(null);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  }<a name="line.296"></a>
+<span class="sourceLineNo">297</span><a name="line.297"></a>
+<span class="sourceLineNo">298</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.298"></a>
+<span class="sourceLineNo">299</span>    List&lt;Integer&gt; flagList = ack.getFlagList();<a name="line.299"></a>
+<span class="sourceLineNo">300</span>    Integer headerFlag;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    if (flagList.isEmpty()) {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      Status reply = ack.getReply(0);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      headerFlag = PipelineAck.combineHeader(ECN.DISABLED, reply);<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    } else {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      headerFlag = flagList.get(0);<a name="line.305"></a>
+<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
+<span class="sourceLineNo">307</span>    return PipelineAck.getStatusFromHeader(headerFlag);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>  }<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.310"></a>
+<span class="sourceLineNo">311</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.311"></a>
+<span class="sourceLineNo">312</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.312"></a>
+<span class="sourceLineNo">313</span>      new ProtobufVarint32FrameDecoder(),<a name="line.313"></a>
+<span class="sourceLineNo">314</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.314"></a>
+<span class="sourceLineNo">315</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.315"></a>
+<span class="sourceLineNo">316</span><a name="line.316"></a>
+<span class="sourceLineNo">317</span>        @Override<a name="line.317"></a>
+<span class="sourceLineNo">318</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.318"></a>
+<span class="sourceLineNo">319</span>            throws Exception {<a name="line.319"></a>
+<span class="sourceLineNo">320</span>          Status pipelineStatus = resp.getStatus();<a name="line.320"></a>
+<span class="sourceLineNo">321</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.322"></a>
+<span class="sourceLineNo">323</span>          }<a name="line.323"></a>
+<span class="sourceLineNo">324</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.325"></a>
+<span class="sourceLineNo">326</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.326"></a>
+<span class="sourceLineNo">327</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.327"></a>
+<span class="sourceLineNo">328</span>                  resp.getMessage() + ", " + logInfo);<a name="line.328"></a>
+<span class="sourceLineNo">329</span>            } else {<a name="line.329"></a>
+<span class="sourceLineNo">330</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.330"></a>
+<span class="sourceLineNo">331</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.331"></a>
+<span class="sourceLineNo">332</span>            }<a name="line.332"></a>
+<span class="sourceLineNo">333</span>          }<a name="line.333"></a>
+<span class="sourceLineNo">334</span>          // success<a name="line.334"></a>
+<span class="sourceLineNo">335</span>          ChannelPipeline p = ctx.pipeline();<a name="line.335"></a>
+<span class="sourceLineNo">336</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.336"></a>
+<span class="sourceLineNo">337</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.337"></a>
+<span class="sourceLineNo">338</span>            // of pipeline.<a name="line.338"></a>
+<span class="sourceLineNo">339</span>            if (handler instanceof IdleStateHandler) {<a name="line.339"></a>
+<span class="sourceLineNo">340</span>              break;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>            }<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          }<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.343"></a>
+<span class="sourceLineNo">344</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.344"></a>
+<span class="sourceLineNo">345</span>          ctx.channel().config().setAutoRead(false);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>          promise.trySuccess(ctx.channel());<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>        @Override<a name="line.349"></a>
+<span class="sourceLineNo">350</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.350"></a>
+<span class="sourceLineNo">351</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.351"></a>
+<span class="sourceLineNo">352</span>        }<a name="line.352"></a>
 <span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>      @Override<a name="line.354"></a>
-<span class="sourceLineNo">355</span>      public OpWriteBlockProto.Builder set(OpWriteBlockProto.Builder builder, Enum&lt;?&gt; storageType) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>        Object protoEnum = name2ProtoEnum.get(storageType.name());<a name="line.356"></a>
-<span class="sourceLineNo">357</span>        try {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>          setStorageTypeMethod.invoke(builder, protoEnum);<a name="line.358"></a>
-<span class="sourceLineNo">359</span>        } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {<a name="line.359"></a>
-<span class="sourceLineNo">360</span>          throw new RuntimeException(e);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>        }<a name="line.361"></a>
-<span class="sourceLineNo">362</span>        return builder;<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      }<a name="line.363"></a>
-<span class="sourceLineNo">364</span>    };<a name="line.364"></a>
-<span class="sourceLineNo">365</span>  }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>  private static BlockAdder createBlockAdder() throws NoSuchMethodException {<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    for (Method method : ClientProtocol.class.getMethods()) {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      if (method.getName().equals("addBlock")) {<a name="line.369"></a>
-<span class="sourceLineNo">370</span>        Method addBlockMethod = method;<a name="line.370"></a>
-<span class="sourceLineNo">371</span>        Class&lt;?&gt;[] paramTypes = addBlockMethod.getParameterTypes();<a name="line.371"></a>
-<span class="sourceLineNo">372</span>        if (paramTypes[paramTypes.length - 1] == String[].class) {<a name="line.372"></a>
-<span class="sourceLineNo">373</span>          return new BlockAdder() {<a name="line.373"></a>
-<span class="sourceLineNo">374</span><a name="line.374"></a>
-<span class="sourceLineNo">375</span>            @Override<a name="line.375"></a>
-<span class="sourceLineNo">376</span>            public LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>                ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId,<a name="line.377"></a>
-<span class="sourceLineNo">378</span>                String[] favoredNodes) throws IOException {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>              try {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>                return (LocatedBlock) addBlockMethod.invoke(namenode, src, clientName, previous,<a name="line.380"></a>
-<span class="sourceLineNo">381</span>                  excludeNodes, fileId, favoredNodes);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>              } catch (IllegalAccessException e) {<a name="line.382"></a>
-<span class="sourceLineNo">383</span>                throw new RuntimeException(e);<a name="line.383"></a>
-<span class="sourceLineNo">384</span>              } catch (InvocationTargetException e) {<a name="line.384"></a>
-<span class="sourceLineNo">385</span>                Throwables.propagateIfPossible(e.getTargetException(), IOException.class);<a name="line.385"></a>
-<span class="sourceLineNo">386</span>                throw new RuntimeException(e);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>              }<a name="line.387"></a>
-<span class="sourceLineNo">388</span>            }<a name="line.388"></a>
-<span class="sourceLineNo">389</span>          };<a name="line.389"></a>
-<span class="sourceLineNo">390</span>        } else {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>          return new BlockAdder() {<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>            @Override<a name="line.393"></a>
-<span class="sourceLineNo">394</span>            public LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,<a name="line.394"></a>
-<span class="sourceLineNo">395</span>                ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId,<a name="line.395"></a>
-<span class="sourceLineNo">396</span>                String[] favoredNodes) throws IOException {<a name="line.396"></a>
-<span class="sourceLineNo">397</span>              try {<a name="line.397"></a>
-<span class="sourceLineNo">398</span>                return (LocatedBlock) addBlockMethod.invoke(namenode, src, clientName, previous,<a name="line.398"></a>
-<span class="sourceLineNo">399</span>                  excludeNodes, fileId, favoredNodes, null);<a name="line.399"></a>
-<span class="sourceLineNo">400</span>              } catch (IllegalAccessException e) {<a name="line.400"></a>
-<span class="sourceLineNo">401</span>                throw new RuntimeException(e);<a name="line.401"></a>
-<span class="sourceLineNo">402</span>              } catch (InvocationTargetException e) {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>                Throwables.propagateIfPossible(e.getTargetException(), IOException.class);<a name="line.403"></a>
-<span class="sourceLineNo">404</span>                throw new RuntimeException(e);<a name="line.404"></a>
-<span class="sourceLineNo">405</span>              }<a name="line.405"></a>
-<span class="sourceLineNo">406</span>            }<a name="line.406"></a>
-<span class="sourceLineNo">407</span>          };<a name="line.407"></a>
-<span class="sourceLineNo">408</span>        }<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      }<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    }<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    throw new NoSuchMethodException("Can not find addBlock method in ClientProtocol");<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  private static PBHelper createPBHelper() throws NoSuchMethodException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    Class&lt;?&gt; helperClass;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    String clazzName = "org.apache.hadoop.hdfs.protocolPB.PBHelperClient";<a name="line.416"></a>
-<span class="sourceLineNo">417</span>    try {<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      helperClass = Class.forName(clazzName);<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    } catch (ClassNotFoundException e) {<a name="line.419"></a>
-<span class="sourceLineNo">420</span>      helperClass = org.apache.hadoop.hdfs.protocolPB.PBHelper.class;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      LOG.debug("" + clazzName + " not found (Hadoop is pre-2.8.0?); using " +<a name="line.421"></a>
-<span class="sourceLineNo">422</span>          helperClass.toString() + " instead.");<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    }<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    Method convertEBMethod = helperClass.getMethod("convert", ExtendedBlock.class);<a name="line.424"></a>
-<span class="sourceLineNo">425</span>    Method convertTokenMethod = helperClass.getMethod("convert", Token.class);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    return new PBHelper() {<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>      @Override<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      public ExtendedBlockProto convert(ExtendedBlock b) {<a name="line.429"></a>
-<span class="sourceLineNo">430</span>        try {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>          return (ExtendedBlockProto) convertEBMethod.invoke(null, b);<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.432"></a>
-<span class="sourceLineNo">433</span>          throw new RuntimeException(e);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>        }<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      }<a name="line.435"></a>
+<span class="sourceLineNo">354</span>        @Override<a name="line.354"></a>
+<span class="sourceLineNo">355</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>            promise<a name="line.357"></a>
+<span class="sourceLineNo">358</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.358"></a>
+<span class="sourceLineNo">359</span>          } else {<a name="line.359"></a>
+<span class="sourceLineNo">360</span>            super.userEventTriggered(ctx, evt);<a name="line.360"></a>
+<span class="sourceLineNo">361</span>          }<a name="line.361"></a>
+<span class="sourceLineNo">362</span>        }<a name="line.362"></a>
+<span class="sourceLineNo">363</span><a name="line.363"></a>
+<span class="sourceLineNo">364</span>        @Override<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.365"></a>
+<span class="sourceLineNo">366</span>          promise.tryFailure(cause);<a name="line.366"></a>
+<span class="sourceLineNo">367</span>        }<a name="line.367"></a>
+<span class="sourceLineNo">368</span>      });<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  }<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>  private static void requestWriteBlock(Channel channel, StorageType storageType,<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    OpWriteBlockProto proto =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    int protoLen = proto.getSerializedSize();<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    ByteBuf buffer =<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    channel.writeAndFlush(buffer);<a name="line.381"></a>
+<span class="sourceLineNo">382</span>  }<a name="line.382"></a>
+<span class="sourceLineNo">383</span><a name="line.383"></a>
+<span class="sourceLineNo">384</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      StorageType storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      throws IOException {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.389"></a>
+<span class="sourceLineNo">390</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.390"></a>
+<span class="sourceLineNo">391</span><a name="line.391"></a>
+<span class="sourceLineNo">392</span>      @Override<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.393"></a>
+<span class="sourceLineNo">394</span>        if (future.isSuccess()) {<a name="line.394"></a>
+<span class="sourceLineNo">395</span>          // setup response processing pipeline first, then send request.<a name="line.395"></a>
+<span class="sourceLineNo">396</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.396"></a>
+<span class="sourceLineNo">397</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        } else {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>          promise.tryFailure(future.cause());<a name="line.399"></a>
+<span class="sourceLineNo">400</span>        }<a name="line.400"></a>
+<span class="sourceLineNo">401</span>      }<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    });<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.405"></a>
+<span class="sourceLineNo">406</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.406"></a>
+<span class="sourceLineNo">407</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.407"></a>
+<span class="sourceLineNo">408</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.408"></a>
+<span class="sourceLineNo">409</span>    StorageType[] storageTypes = locatedBlock.getStorageTypes();<a name="line.409"></a>
+<span class="sourceLineNo">410</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.410"></a>
+<span class="sourceLineNo">411</span>    boolean connectToDnViaHostname =<a name="line.411"></a>
+<span class="sourceLineNo">412</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.416"></a>
+<span class="sourceLineNo">417</span>      .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PBHelperClient.convert(blockCopy))<a name="line.417"></a>
+<span class="sourceLineNo">418</span>        .setToken(PBHelperClient.convert(locatedBlock.getBlockToken())))<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      .setClientName(clientName).build();<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.420"></a>
+<span class="sourceLineNo">421</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.421"></a>
+<span class="sourceLineNo">422</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.422"></a>
+<span class="sourceLineNo">423</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.423"></a>
+<span class="sourceLineNo">424</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.424"></a>
+<span class="sourceLineNo">425</span>        .setRequestedChecksum(checksumProto)<a name="line.425"></a>
+<span class="sourceLineNo">426</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.429"></a>
+<span class="sourceLineNo">430</span>      StorageType storageType = storageTypes[i];<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.431"></a>
+<span class="sourceLineNo">432</span>      futureList.add(promise);<a name="line.432"></a>
+<span class="sourceLineNo">433</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.433"></a>
+<span class="sourceLineNo">434</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.434"></a>
+<span class="sourceLineNo">435</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.435"></a>
 <span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>      @Override<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      public TokenProto convert(Token&lt;?&gt; tok) {<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        try {<a name="line.439"></a>
-<span class="sourceLineNo">440</span>          return (TokenProto) convertTokenMethod.invoke(null, tok);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>          throw new RuntimeException(e);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        }<a name="line.443"></a>
-<span class="sourceLineNo">444</span>      }<a name="line.444"></a>
-<span class="sourceLineNo">445</span>    };<a name="line.445"></a>
-<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>  private static ChecksumCreater createChecksumCreater28(Method getConfMethod, Class&lt;?&gt; confClass)<a name="line.448"></a>
-<span class="sourceLineNo">449</span>      throws NoSuchMethodException {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    for (Method method : confClass.getMethods()) {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      if (method.getName().equals("createChecksum")) {<a name="line.451"></a>
-<span class="sourceLineNo">452</span>        Method createChecksumMethod = method;<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        return new ChecksumCreater() {<a name="line.453"></a>
-<span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>          @Override<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          public DataChecksum createChecksum(DFSClient client) {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>            try {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>              return (DataChecksum) createChecksumMethod.invoke(getConfMethod.invoke(client),<a name="line.458"></a>
-<span class="sourceLineNo">459</span>                (Object) null);<a name="line.459"></a>
-<span class="sourceLineNo">460</span>            } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.460"></a>
-<span class="sourceLineNo">461</span>              throw new RuntimeException(e);<a name="line.461"></a>
-<span class="sourceLineNo">462</span>            }<a name="line.462"></a>
-<span class="sourceLineNo">463</span>          }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>        };<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      }<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    }<a name="line.466"></a>
-<span class="sourceLineNo">467</span>    throw new NoSuchMethodException("Can not find createChecksum method in DfsClientConf");<a name="line.467"></a>
-<span class="sourceLineNo">468</span>  }<a name="line.468"></a>
-<span class="sourceLineNo">469</span><a name="line.469"></a>
-<span class="sourceLineNo">470</span>  private static ChecksumCreater createChecksumCreater27(Method getConfMethod, Class&lt;?&gt; confClass)<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      throws NoSuchMethodException {<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    Method createChecksumMethod = confClass.getDeclaredMethod("createChecksum");<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    createChecksumMethod.setAccessible(true);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    return new ChecksumCreater() {<a name="line.474"></a>
-<span class="sourceLineNo">475</span><a name="line.475"></a>
-<span class="sourceLineNo">476</span>      @Override<a name="line.476"></a>
-<span class="sourceLineNo">477</span>      public DataChecksum createChecksum(DFSClient client) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>        try {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>          return (DataChecksum) createChecksumMethod.invoke(getConfMethod.invoke(client));<a name="line.479"></a>
-<span class="sourceLineNo">480</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          throw new RuntimeException(e);<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      }<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    };<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  }<a name="line.485"></a>
-<span class="sourceLineNo">486</span><a name="line.486"></a>
-<span class="sourceLineNo">487</span>  private static ChecksumCreater createChecksumCreater()<a name="line.487"></a>
-<span class="sourceLineNo">488</span>      throws NoSuchMethodException, ClassNotFoundException {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    Method getConfMethod = DFSClient.class.getMethod("getConf");<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    try {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      return createChecksumCreater28(getConfMethod,<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        Class.forName("org.apache.hadoop.hdfs.client.impl.DfsClientConf"));<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    } catch (ClassNotFoundException e) {<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      LOG.debug("No DfsClientConf class found, should be hadoop 2.7-", e);<a name="line.494"></a>
-<span class="sourceLineNo">495</span>    }<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    return createChecksumCreater27(getConfMethod,<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      Class.forName("org.apache.hadoop.hdfs.DFSClient$Conf"));<a name="line.497"></a>
-<span class="sourceLineNo">498</span>  }<a name="line.498"></a>
-<span class="sourceLineNo">499</span><a name="line.499"></a>
-<span class="sourceLineNo">500</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      CryptoProtocolVersion[].class, String.class);<a name="line.503"></a>
-<span class="sourceLineNo">504</span><a name="line.504"></a>
-<span class="sourceLineNo">505</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        supportedVersions) -&gt; {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.507"></a>
-<span class="sourceLineNo">508</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    };<a name="line.509"></a>
-<span class="sourceLineNo">510</span>  }<a name="line.510"></a>
-<span class="sourceLineNo">511</span><a name="line.511"></a>
-<span class="sourceLineNo">512</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      CryptoProtocolVersion[].class);<a name="line.515"></a>
-<span class="sourceLineNo">516</span><a name="line.516"></a>
-<span class="sourceLineNo">517</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.517"></a>
-<span class="sourceLineNo">518</span>        supportedVersions) -&gt; {<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.519"></a>
-<span class="sourceLineNo">520</span>        createParent, replication, blockSize, supportedVersions);<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    };<a name="line.521"></a>
-<span class="sourceLineNo">522</span>  }<a name="line.522"></a>
-<span class="sourceLineNo">523</span><a name="line.523"></a>
-<span class="sourceLineNo">524</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>    try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>      return createFileCreator3();<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    } catch (NoSuchMethodException e) {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    return createFileCreator2();<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  }<a name="line.531"></a>
-<span class="sourceLineNo">532</span><a name="line.532"></a>
-<span class="sourceLineNo">533</span>  // cancel the processing if DFSClient is already closed.<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>    private final DFSClient client;<a name="line.536"></a>
-<span class="sourceLineNo">537</span><a name="line.537"></a>
-<span class="sourceLineNo">538</span>    public CancelOnClose(DFSClient client) {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>      this.client = client;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    }<a name="line.540"></a>
-<span class="sourceLineNo">541</span><a name="line.541"></a>
-<span class="sourceLineNo">542</span>    @Override<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    public boolean progress() {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  static {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    try {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      PIPELINE_ACK_STATUS_GETTER = createPipelineAckStatusGetter();<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      STORAGE_TYPE_SETTER = createStorageTypeSetter();<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      BLOCK_ADDER = createBlockAdder();<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      LEASE_MANAGER = createLeaseManager();<a name="line.553"></a>
-<span class="sourceLineNo">554</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      PB_HELPER = createPBHelper();<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      CHECKSUM_CREATER = createChecksumCreater();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>      FILE_CREATOR = createFileCreator();<a name="line.557"></a>
-<span class="sourceLineNo">558</span>    } catch (Exception e) {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.559"></a>
-<span class="sourceLineNo">560</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.560"></a>
-<span class="sourceLineNo">561</span>          "HBASE-16110 for more information.";<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      LOG.error(msg, e);<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      throw new Error(msg, e);<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    }<a name="line.564"></a>
-<span class="sourceLineNo">565</span>  }<a name="line.565"></a>
-<span class="sourceLineNo">566</span><a name="line.566"></a>
-<span class="sourceLineNo">567</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.568"></a>
-<span class="sourceLineNo">569</span>  }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.572"></a>
-<span class="sourceLineNo">573</span>  }<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    return CHECKSUM_CREATER.createChecksum(client);<a name="line.576"></a>
-<span class="sourceLineNo">577</span>  }<a name="line.577"></a>
-<span class="sourceLineNo">578</span><a name="line.578"></a>
-<span class="sourceLineNo">579</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    return PIPELINE_ACK_STATUS_GETTER.get(ack);<a name="line.580"></a>
-<span class="sourceLineNo">581</span>  }<a name="line.581"></a>
-<span class="sourceLineNo">582</span><a name="line.582"></a>
-<span class="sourceLineNo">583</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.583"></a>
-<span class="sourceLineNo">584</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.584"></a>
-<span class="sourceLineNo">585</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      new ProtobufVarint32FrameDecoder(),<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>        @Override<a name="line.590"></a>
-<span class="sourceLineNo">591</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>            throws Exception {<a name="line.592"></a>
-<span class="sourceLineNo">593</span>          Status pipelineStatus = resp.getStatus();<a name="line.593"></a>
-<span class="sourceLineNo">594</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.595"></a>
-<span class="sourceLineNo">596</span>          }<a name="line.596"></a>
-<span class="sourceLineNo">597</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.600"></a>
-<span class="sourceLineNo">601</span>                  resp.getMessage() + ", " + logInfo);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>            } else {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.603"></a>
-<span class="sourceLineNo">604</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>            }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>          }<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          // success<a name="line.607"></a>
-<span class="sourceLineNo">608</span>          ChannelPipeline p = ctx.pipeline();<a name="line.608"></a>
-<span class="sourceLineNo">609</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.610"></a>
-<span class="sourceLineNo">611</span>            // of pipeline.<a name="line.611"></a>
-<span class="sourceLineNo">612</span>            if (handler instanceof IdleStateHandler) {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>              break;<a name="line.613"></a>
-<span class="sourceLineNo">614</span>            }<a name="line.614"></a>
-<span class="sourceLineNo">615</span>          }<a name="line.615"></a>
-<span class="sourceLineNo">616</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.616"></a>
-<span class="sourceLineNo">617</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.617"></a>
-<span class="sourceLineNo">618</span>          ctx.channel().config().setAutoRead(false);<a name="line.618"></a>
-<span class="sourceLineNo">619</span>          promise.trySuccess(ctx.channel());<a name="line.619"></a>
-<span class="sourceLineNo">620</span>        }<a name="line.620"></a>
-<span class="sourceLineNo">621</span><a name="line.621"></a>
-<span class="sourceLineNo">622</span>        @Override<a name="line.622"></a>
-<span class="sourceLineNo">623</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.624"></a>
-<span class="sourceLineNo">625</span>        }<a name="line.625"></a>
-<span class="sourceLineNo">626</span><a name="line.626"></a>
-<span class="sourceLineNo">627</span>        @Override<a name="line.627"></a>
-<span class="sourceLineNo">628</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.628"></a>
-<span class="sourceLineNo">629</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.629"></a>
-<span class="sourceLineNo">630</span>            promise<a name="line.630"></a>
-<span class="sourceLineNo">631</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          } else {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>            super.userEventTriggered(ctx, evt);<a name="line.633"></a>
-<span class="sourceLineNo">634</span>          }<a name="line.634"></a>
-<span class="sourceLineNo">635</span>        }<a name="line.635"></a>
-<span class="sourceLineNo">636</span><a name="line.636"></a>
-<span class="sourceLineNo">637</span>        @Override<a name="line.637"></a>
-<span class="sourceLineNo">638</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.638"></a>
-<span class="sourceLineNo">639</span>          promise.tryFailure(cause);<a name="line.639"></a>
-<span class="sourceLineNo">640</span>        }<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      });<a name="line.641"></a>
-<span class="sourceLineNo">642</span>  }<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>  private static void requestWriteBlock(Channel channel, Enum&lt;?&gt; storageType,<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.645"></a>
-<span class="sourceLineNo">646</span>    OpWriteBlockProto proto = STORAGE_TYPE_SETTER.set(writeBlockProtoBuilder, storageType).build();<a name="line.646"></a>
-<span class="sourceLineNo">647</span>    int protoLen = proto.getSerializedSize();<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    ByteBuf buffer =<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.650"></a>
-<span class="sourceLineNo">651</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.651"></a>
-<span class="sourceLineNo">652</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    channel.writeAndFlush(buffer);<a name="line.653"></a>
-<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.656"></a>
-<span class="sourceLineNo">657</span>      Enum&lt;?&gt; storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      throws IOException {<a name="line.659"></a>
-<span class="sourceLineNo">660</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.662"></a>
-<span class="sourceLineNo">663</span><a name="line.663"></a>
-<span class="sourceLineNo">664</span>      @Override<a name="line.664"></a>
-<span class="sourceLineNo">665</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        if (future.isSuccess()) {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>          // setup response processing pipeline first, then send request.<a name="line.667"></a>
-<span class="sourceLineNo">668</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.669"></a>
-<span class="sourceLineNo">670</span>        } else {<a name="line.670"></a>
-<span class="sourceLineNo">671</span>          promise.tryFailure(future.cause());<a name="line.671"></a>
-<span class="sourceLineNo">672</span>        }<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    });<a name="line.674"></a>
-<span class="sourceLineNo">675</span>  }<a name="line.675"></a>
-<span class="sourceLineNo">676</span><a name="line.676"></a>
-<span class="sourceLineNo">677</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.677"></a>
-<span class="sourceLineNo">678</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.678"></a>
-<span class="sourceLineNo">679</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    Enum&lt;?&gt;[] storageTypes = locatedBlock.getStorageTypes();<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.682"></a>
-<span class="sourceLineNo">683</span>    boolean connectToDnViaHostname =<a name="line.683"></a>
-<span class="sourceLineNo">684</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.688"></a>
-<span class="sourceLineNo">689</span>        .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PB_HELPER.convert(blockCopy))<a name="line.689"></a>
-<span class="sourceLineNo">690</span>            .setToken(PB_HELPER.convert(locatedBlock.getBlockToken())))<a name="line.690"></a>
-<span class="sourceLineNo">691</span>        .setClientName(clientName).build();<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.693"></a>
-<span class="sourceLineNo">694</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.694"></a>
-<span class="sourceLineNo">695</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.695"></a>
-<span class="sourceLineNo">696</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.696"></a>
-<span class="sourceLineNo">697</span>        .setRequestedChecksum(checksumProto)<a name="line.697"></a>
-<span class="sourceLineNo">698</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.700"></a>
-<span class="sourceLineNo">701</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.701"></a>
-<span class="sourceLineNo">702</span>      Enum&lt;?&gt; storageType = storageTypes[i];<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      futureList.add(promise);<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.705"></a>
-<span class="sourceLineNo">706</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.706"></a>
-<span class="sourceLineNo">707</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span><a name="line.708"></a>
-<span class="sourceLineNo">709</span>            @Override<a name="line.709"></a>
-<span class="sourceLineNo">710</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.710"></a>
-<span class="sourceLineNo">711</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.711"></a>
-<span class="sourceLineNo">712</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.712"></a>
-<span class="sourceLineNo">713</span>              // a null handler.<a name="line.713"></a>
-<span class="sourceLineNo">714</span>            }<a name="line.714"></a>
-<span class="sourceLineNo">715</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.715"></a>
-<span class="sourceLineNo">716</span><a name="line.716"></a>
-<span class="sourceLineNo">717</span>            @Override<a name="line.717"></a>
-<span class="sourceLineNo">718</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.718"></a>
-<span class="sourceLineNo">719</span>              if (future.isSuccess()) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.720"></a>
-<span class="sourceLineNo">721</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.721"></a>
-<span class="sourceLineNo">722</span>              } else {<a name="line.722"></a>
-<span class="sourceLineNo">723</span>                promise.tryFailure(future.cause());<a name="line.723"></a>
-<span class="sourceLineNo">724</span>              }<a name="line.724"></a>
-<span class="sourceLineNo">725</span>            }<a name="line.725"></a>
-<span class="sourceLineNo">726</span>          });<a name="line.726"></a>
-<span class="sourceLineNo">727</span>    }<a name="line.727"></a>
-<span class="sourceLineNo">728</span>    return futureList;<a name="line.728"></a>
-<span class="sourceLineNo">729</span>  }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>  /**<a name="line.731"></a>
-<span class="sourceLineNo">732</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.732"></a>
-<span class="sourceLineNo">733</span>   */<a name="line.733"></a>
-<span class="sourceLineNo">734</span>  public static class NameNodeException extends IOException {<a name="line.734"></a>
-<span class="sourceLineNo">735</span><a name="line.735"></a>
-<span class="sourceLineNo">736</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    public NameNodeException(Throwable cause) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      super(cause);<a name="line.739"></a>
-<span class="sourceLineNo">740</span>    }<a name="line.740"></a>
-<span class="sourceLineNo">741</span>  }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.743"></a>
-<span class="sourceLineNo">744</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.744"></a>
-<span class="sourceLineNo">745</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    Configuration conf = dfs.getConf();<a name="line.746"></a>
-<span class="sourceLineNo">747</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    DFSClient client = dfs.getClient();<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    String clientName = client.getClientName();<a name="line.749"></a>
-<span class="sourceLineNo">750</span>    ClientProtocol namenode = client.getNamenode();<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.751"></a>
-<span class="sourceLineNo">752</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.752"></a>
-<span class="sourceLineNo">753</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    for (int retry = 0;; retry++) {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      HdfsFileStatus stat;<a name="line.755"></a>
-<span class="sourceLineNo">756</span>      try {<a name="line.756"></a>
-<span class="sourceLineNo">757</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.757"></a>
-<span class="sourceLineNo">758</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.758"></a>
-<span class="sourceLineNo">759</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.759"></a>
-<span class="sourceLineNo">760</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      } catch (Exception e) {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>        if (e instanceof RemoteException) {<a name="line.762"></a>
-<span class="sourceLineNo">763</span>          throw (RemoteException) e;<a name="line.763"></a>
-<span class="sourceLineNo">764</span>        } else {<a name="line.764"></a>
-<span class="sourceLineNo">765</span>          throw new NameNodeException(e);<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        }<a name="line.766"></a>
-<span class="sourceLineNo">767</span>      }<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      beginFileLease(client, stat.getFileId());<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      boolean succ = false;<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      LocatedBlock locatedBlock = null;<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      try {<a name="line.772"></a>
-<span class="sourceLineNo">773</span>        DataChecksum summer = createChecksum(client);<a name="line.773"></a>
-<span class="sourceLineNo">774</span>        locatedBlock = BLOCK_ADDER.addBlock(namenode, src, client.getClientName(), null,<a name="line.774"></a>
-<span class="sourceLineNo">775</span>          excludesNodes, stat.getFileId(), null);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.776"></a>
-<span class="sourceLineNo">777</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.777"></a>
-<span class="sourceLineNo">778</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.778"></a>
-<span class="sourceLineNo">779</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>          try {<a name="line.780"></a>
-<span class="sourceLineNo">781</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.781"></a>
-<span class="sourceLineNo">782</span>          } catch (Exception e) {<a name="line.782"></a>
-<span class="sourceLineNo">783</span>            // exclude the broken DN next time<a name="line.783"></a>
-<span class="sourceLineNo">784</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.784"></a>
-<span class="sourceLineNo">785</span>            throw e;<a name="line.785"></a>
-<span class="sourceLineNo">786</span>          }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>        }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.788"></a>
-<span class="sourceLineNo">789</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.789"></a>
-<span class="sourceLineNo">790</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.790"></a>
-<span class="sourceLineNo">791</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.791"></a>
-<span class="sourceLineNo">792</span>        succ = true;<a name="line.792"></a>
-<span class="sourceLineNo">793</span>        return output;<a name="line.793"></a>
-<span class="sourceLineNo">794</span>      } catch (RemoteException e) {<a name="line.794"></a>
-<span class="sourceLineNo">795</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.795"></a>
-<span class="sourceLineNo">796</span>        if (shouldRetryCreate(e)) {<a name="line.796"></a>
-<span class="sourceLineNo">797</span>          if (retry &gt;= createMaxRetries) {<a name="line.797"></a>
-<span class="sourceLineNo">798</span>            throw e.unwrapRemoteException();<a name="line.798"></a>
-<span class="sourceLineNo">799</span>          }<a name="line.799"></a>
-<span class="sourceLineNo">800</span>        } else {<a name="line.800"></a>
-<span class="sourceLineNo">801</span>          throw e.unwrapRemoteException();<a name="line.801"></a>
-<span class="sourceLineNo">802</span>        }<a name="line.802"></a>
-<span class="sourceLineNo">803</span>      } catch (IOException e) {<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        if (retry &gt;= createMaxRetries) {<a name="line.805"></a>
-<span class="sourceLineNo">806</span>          throw e;<a name="line.806"></a>
-<span class="sourceLineNo">807</span>        }<a name="line.807"></a>
-<span class="sourceLineNo">808</span>        // overwrite the old broken file.<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        overwrite = true;<a name="line.809"></a>
-<span class="sourceLineNo">810</span>        try {<a name="line.810"></a>
-<span class="sourceLineNo">811</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.811"></a>
-<span class="sourceLineNo">812</span>        } catch (InterruptedException ie) {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>          throw new InterruptedIOException();<a name="line.813"></a>
-<span class="sourceLineNo">814</span>        }<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      } finally {<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        if (!succ) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>          if (futureList != null) {<a name="line.817"></a>
-<span class="sourceLineNo">818</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.818"></a>
-<span class="sourceLineNo">819</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.819"></a>
-<span class="sourceLineNo">820</span><a name="line.820"></a>
-<span class="sourceLineNo">821</span>                @Override<a name="line.821"></a>
-<span class="sourceLineNo">822</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>                  if (future.isSuccess()) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>                    future.getNow().close();<a name="line.824"></a>
-<span class="sourceLineNo">825</span>                  }<a name="line.825"></a>
-<span class="sourceLineNo">826</span>                }<a name="line.826"></a>
-<span class="sourceLineNo">827</span>              });<a name="line.827"></a>
-<span class="sourceLineNo">828</span>            }<a name="line.828"></a>
-<span class="sourceLineNo">829</span>          }<a name="line.829"></a>
-<span class="sourceLineNo">830</span>          endFileLease(client, stat.getFileId());<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        }<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      }<a name="line.832"></a>
-<span class="sourceLineNo">833</span>    }<a name="line.833"></a>
-<span class="sourceLineNo">834</span>  }<a name="line.834"></a>
-<span class="sourceLineNo">835</span><a name="line.835"></a>
-<span class="sourceLineNo">836</span>  /**<a name="line.836"></a>
-<span class="sourceLineNo">837</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.837"></a>
-<span class="sourceLineNo">838</span>   * inside an {@link EventLoop}.<a name="line.838"></a>
-<span class="sourceLineNo">839</span>   */<a name="line.839"></a>
-<span class="sourceLineNo">840</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.840"></a>
-<span class="sourceLineNo">841</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.841"></a>
-<span class="sourceLineNo">842</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.842"></a>
-<span class="sourceLineNo">843</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.843"></a>
-<span class="sourceLineNo">844</span><a name="line.844"></a>
-<span class="sourceLineNo">845</span>      @Override<a name="line.845"></a>
-<span class="sourceLineNo">846</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.846"></a>
-<span class="sourceLineNo">847</span>          throws IOException, UnresolvedLinkException {<a name="line.847"></a>
-<span class="sourceLineNo">848</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          blockSize, eventLoopGroup, channelClass);<a name="line.849"></a>
-<span class="sourceLineNo">850</span>      }<a name="line.850"></a>
-<span class="sourceLineNo">851</span><a name="line.851"></a>
-<span class="sourceLineNo">852</span>      @Override<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.853"></a>
-<span class="sourceLineNo">854</span>        throw new UnsupportedOperationException();<a name="line.854"></a>
-<span class="sourceLineNo">855</span>      }<a name="line.855"></a>
-<span class="sourceLineNo">856</span>    }.resolve(dfs, f);<a name="line.856"></a>
-<span class="sourceLineNo">857</span>  }<a name="line.857"></a>
-<span class="sourceLineNo">858</span><a name="line.858"></a>
-<span class="sourceLineNo">859</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.861"></a>
-<span class="sourceLineNo">862</span>    // DFSOutputStream.newStreamForCreate.<a name="line.862"></a>
-<span class="sourceLineNo">863</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.863"></a>
-<span class="sourceLineNo">864</span>  }<a name="line.864"></a>
-<span class="sourceLineNo">865</span><a name="line.865"></a>
-<span class="sourceLineNo">866</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.866"></a>
-<span class="sourceLineNo">867</span>      ExtendedBlock block, long fileId) {<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    for (int retry = 0;; retry++) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      try {<a name="line.869"></a>
-<span class="sourceLineNo">870</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>          endFileLease(client, fileId);<a name="line.871"></a>
-<span class="sourceLineNo">872</span>          return;<a name="line.872"></a>
-<span class="sourceLineNo">873</span>        } else {<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>      } catch (RemoteException e) {<a name="line.876"></a>
-<span class="sourceLineNo">877</span>        IOException ioe = e.unwrapRemoteException();<a name="line.877"></a>
-<span class="sourceLineNo">878</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.879"></a>
-<span class="sourceLineNo">880</span>          return;<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        } else {<a name="line.881"></a>
-<span class="sourceLineNo">882</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.882"></a>
-<span class="sourceLineNo">883</span>        }<a name="line.883"></a>
-<span class="sourceLineNo">884</span>      } catch (Exception e) {<a name="line.884"></a>
-<span class="sourceLineNo">885</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.885"></a>
-<span class="sourceLineNo">886</span>      }<a name="line.886"></a>
-<span class="sourceLineNo">887</span>      sleepIgnoreInterrupt(retry);<a name="line.887"></a>
-<span class="sourceLineNo">888</span>    }<a name="line.888"></a>
-<span class="sourceLineNo">889</span>  }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.891"></a>
-<span class="sourceLineNo">892</span>    try {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.893"></a>
-<span class="sourceLineNo">894</span>    } catch (InterruptedException e) {<a name="line.894"></a>
-<span class="sourceLineNo">895</span>    }<a name="line.895"></a>
-<span class="sourceLineNo">896</span>  }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>}<a name="line.897"></a>
+<span class="sourceLineNo">437</span>            @Override<a name="line.437"></a>
+<span class="sourceLineNo">438</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.438"></a>
+<span class="sourceLineNo">439</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.439"></a>
+<span class="sourceLineNo">440</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.440"></a>
+<span class="sourceLineNo">441</span>              // a null handler.<a name="line.441"></a>
+<span class="sourceLineNo">442</span>            }<a name="line.442"></a>
+<span class="sourceLineNo">443</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.443"></a>
+<span class="sourceLineNo">444</span><a name="line.444"></a>
+<span class="sourceLineNo">445</span>            @Override<a name="line.445"></a>
+<span class="sourceLineNo">446</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>              if (future.isSuccess()) {<a name="line.447"></a>
+<span class="sourceLineNo">448</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.448"></a>
+<span class="sourceLineNo">449</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.449"></a>
+<span class="sourceLineNo">450</span>              } else {<a name="line.450"></a>
+<span class="sourceLineNo">451</span>                promise.tryFailure(future.cause());<a name="line.451"></a>
+<span class="sourceLineNo">452</span>              }<a name="line.452"></a>
+<span class="sourceLineNo">453</span>            }<a name="line.453"></a>
+<span class="sourceLineNo">454</span>          });<a name="line.454"></a>
+<span class="sourceLineNo">455</span>    }<a name="line.455"></a>
+<span class="sourceLineNo">456</span>    return futureList;<a name="line.456"></a>
+<span class="sourceLineNo">457</span>  }<a name="line.457"></a>
+<span class="sourceLineNo">458</span><a name="line.458"></a>
+<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
+<span class="sourceLineNo">460</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.460"></a>
+<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
+<span class="sourceLineNo">462</span>  public static class NameNodeException extends IOException {<a name="line.462"></a>
+<span class="sourceLineNo">463</span><a name="line.463"></a>
+<span class="sourceLineNo">464</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.464"></a>
+<span class="sourceLineNo">465</span><a name="line.465"></a>
+<span class="sourceLineNo">466</span>    public NameNodeException(Throwable cause) {<a name="line.466"></a>
+<span class="sourceLineNo">467</span>      super(cause);<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    }<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.471"></a>
+<span class="sourceLineNo">472</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.472"></a>
+<span class="sourceLineNo">473</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.473"></a>
+<span class="sourceLineNo">474</span>    Configuration conf = dfs.getConf();<a name="line.474"></a>
+<span class="sourceLineNo">475</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.475"></a>
+<span class="sourceLineNo">476</span>    DFSClient client = dfs.getClient();<a name="line.476"></a>
+<span class="sourceLineNo">477</span>    String clientName = client.getClientName();<a name="line.477"></a>
+<span class="sourceLineNo">478</span>    ClientProtocol namenode = client.getNamenode();<a name="line.478"></a>
+<span class="sourceLineNo">479</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.479"></a>
+<span class="sourceLineNo">480</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.480"></a>
+<span class="sourceLineNo">481</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.481"></a>
+<span class="sourceLineNo">482</span>    for (int retry = 0;; retry++) {<a name="line.482"></a>
+<span class="sourceLineNo">483</span>      HdfsFileStatus stat;<a name="line.483"></a>
+<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
+<span class="sourceLineNo">485</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.485"></a>
+<span class="sourceLineNo">486</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.486"></a>
+<span class="sourceLineNo">487</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.487"></a>
+<span class="sourceLineNo">488</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.488"></a>
+<span class="sourceLineNo">489</span>      } catch (Exception e) {<a name="line.489"></a>
+<span class="sourceLineNo">490</span>        if (e instanceof RemoteException) {<a name="line.490"></a>
+<span class="sourceLineNo">491</span>          throw (RemoteException) e;<a name="line.491"></a>
+<span class="sourceLineNo">492</span>        } else {<a name="line.492"></a>
+<span class="sourceLineNo">493</span>          throw new NameNodeException(e);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>        }<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      }<a name="line.495"></a>
+<span class="sourceLineNo">496</span>      beginFileLease(client, stat.getFileId());<a name="line.496"></a>
+<span class="sourceLineNo">497</span>      boolean succ = false;<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      LocatedBlock locatedBlock = null;<a name="line.498"></a>
+<span class="sourceLineNo">499</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.499"></a>
+<span class="sourceLineNo">500</span>      try {<a name="line.500"></a>
+<span class="sourceLineNo">501</span>        DataChecksum summer = createChecksum(client);<a name="line.501"></a>
+<span class="sourceLineNo">502</span>        locatedBlock = namenode.addBlock(src, client.getClientName(), null, excludesNodes,<a name="line.502"></a>
+<span class="sourceLineNo">503</span>          stat.getFileId(), null, null);<a name="line.503"></a>
+<span class="sourceLineNo">504</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.504"></a>
+<span class="sourceLineNo">505</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.505"></a>
+<span class="sourceLineNo">506</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.506"></a>
+<span class="sourceLineNo">507</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>          try {<a name="line.508"></a>
+<span class="sourceLineNo">509</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.509"></a>
+<span class="sourceLineNo">510</span>          } catch (Exception e) {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>            // exclude the broken DN next time<a name="line.511"></a>
+<span class="sourceLineNo">512</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.512"></a>
+<span class="sourceLineNo">513</span>            throw e;<a name="line.513"></a>
+<span class="sourceLineNo">514</span>          }<a name="line.514"></a>
+<span class="sourceLineNo">515</span>        }<a name="line.515"></a>
+<span class="sourceLineNo">516</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.516"></a>
+<span class="sourceLineNo">517</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.517"></a>
+<span class="sourceLineNo">518</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.518"></a>
+<span class="sourceLineNo">519</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.519"></a>
+<span class="sourceLineNo">520</span>        succ = true;<a name="line.520"></a>
+<span class="sourceLineNo">521</span>        return output;<a name="line.521"></a>
+<span class="sourceLineNo">522</span>      } catch (RemoteException e) {<a name="line.522"></a>
+<span class="sourceLineNo">523</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.523"></a>
+<span class="sourceLineNo">524</span>        if (shouldRetryCreate(e)) {<a name="line.524"></a>
+<span class="sourceLineNo">525</span>          if (retry &gt;= createMaxRetries) {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>            throw e.unwrapRemoteException();<a name="line.526"></a>
+<span class="sourceLineNo">527</span>          }<a name="line.527"></a>
+<span class="sourceLineNo">528</span>        } else {<a name="line.528"></a>
+<span class="sourceLineNo">529</span>          throw e.unwrapRemoteException();<a name="line.529"></a>
+<span class="sourceLineNo">530</span>        }<a name="line.530"></a>
+<span class="sourceLineNo">531</span>      } catch (IOException e) {<a name="line.531"></a>
+<span class="sourceLineNo">532</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.532"></a>
+<span class="sourceLineNo">533</span>        if (retry &gt;= createMaxRetries) {<a name="line.533"></a>
+<span class="sourceLineNo">534</span>          throw e;<a name="line.534"></a>
+<span class="sourceLineNo">535</span>        }<a name="line.535"></a>
+<span class="sourceLineNo">536</span>        // overwrite the old broken file.<a name="line.536"></a>
+<span class="sourceLineNo">537</span>        overwrite = true;<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        try {<a name="line.538"></a>
+<span class="sourceLineNo">539</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.539"></a>
+<span class="sourceLineNo">540</span>        } catch (InterruptedException ie) {<a name="line.540"></a>
+<span class="sourceLineNo">541</span>          throw new InterruptedIOException();<a name="line.541"></a>
+<span class="sourceLineNo">542</span>        }<a name="line.542"></a>
+<span class="sourceLineNo">543</span>      } finally {<a name="line.543"></a>
+<span class="sourceLineNo">544</span>        if (!succ) {<a name="line.544"></a>
+<span class="sourceLineNo">545</span>          if (futureList != null) {<a name="line.545"></a>
+<span class="sourceLineNo">546</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.546"></a>
+<span class="sourceLineNo">547</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.547"></a>
+<span class="sourceLineNo">548</span><a name="line.548"></a>
+<span class="sourceLineNo">549</span>                @Override<a name="line.549"></a>
+<span class="sourceLineNo">550</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.550"></a>
+<span class="sourceLineNo">551</span>                  if (future.isSuccess()) {<a name="line.551"></a>
+<span class="sourceLineNo">552</span>                    future.getNow().close();<a name="line.552"></a>
+<span class="sourceLineNo">553</span>                  }<a name="line.553"></a>
+<span class="sourceLineNo">554</span>                }<a name="line.554"></a>
+<span class="sourceLineNo">555</span>              });<a name="line.555"></a>
+<span class="sourceLineNo">556</span>            }<a name="line.556"></a>
+<span class="sourceLineNo">557</span>          }<a name="line.557"></a>
+<span class="sourceLineNo">558</span>          endFileLease(client, stat.getFileId());<a name="line.558"></a>
+<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
+<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
+<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
+<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
+<span class="sourceLineNo">563</span><a name="line.563"></a>
+<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
+<span class="sourceLineNo">565</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.565"></a>
+<span class="sourceLineNo">566</span>   * inside an {@link EventLoop}.<a name="line.566"></a>
+<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
+<span class="sourceLineNo">568</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.568"></a>
+<span class="sourceLineNo">569</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.569"></a>
+<span class="sourceLineNo">570</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.570"></a>
+<span class="sourceLineNo">571</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.571"></a>
+<span class="sourceLineNo">572</span><a name="line.572"></a>
+<span class="sourceLineNo">573</span>      @Override<a name="line.573"></a>
+<span class="sourceLineNo">574</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.574"></a>
+<span class="sourceLineNo">575</span>          throws IOException, UnresolvedLinkException {<a name="line.575"></a>
+<span class="sourceLineNo">576</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.576"></a>
+<span class="sourceLineNo">577</span>          blockSize, eventLoopGroup, channelClass);<a name="line.577"></a>
+<span class="sourceLineNo">578</span>      }<a name="line.578"></a>
+<span class="sourceLineNo">579</span><a name="line.579"></a>
+<span class="sourceLineNo">580</span>      @Override<a name="line.580"></a>
+<span class="sourceLineNo">581</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.581"></a>
+<span class="sourceLineNo">582</span>        throw new UnsupportedOperationException();<a name="line.582"></a>
+<span class="sourceLineNo">583</span>      }<a name="line.583"></a>
+<span class="sourceLineNo">584</span>    }.resolve(dfs, f);<a name="line.584"></a>
+<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
+<span class="sourceLineNo">586</span><a name="line.586"></a>
+<span class="sourceLineNo">587</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.587"></a>
+<span class="sourceLineNo">588</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.588"></a>
+<span class="sourceLineNo">589</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.589"></a>
+<span class="sourceLineNo">590</span>    // DFSOutputStream.newStreamForCreate.<a name="line.590"></a>
+<span class="sourceLineNo">591</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.591"></a>
+<span class="sourceLineNo">592</span>  }<a name="line.592"></a>
+<span class="sourceLineNo">593</span><a name="line.593"></a>
+<span class="sourceLineNo">594</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.594"></a>
+<span class="sourceLineNo">595</span>      ExtendedBlock block, long fileId) {<a name="line.595"></a>
+<span class="sourceLineNo">596</span>    for (int retry = 0;; retry++) {<a name="line.596"></a>
+<span class="sourceLineNo">597</span>      try {<a name="line.597"></a>
+<span class="sourceLineNo">598</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.598"></a>
+<span class="sourceLineNo">599</span>          endFileLease(client, fileId);<a name="line.599"></a>
+<span class="sourceLineNo">600</span>          return;<a name="line.600"></a>
+<span class="sourceLineNo">601</span>        } else {<a name="line.601"></a>
+<span class="sourceLineNo">602</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.602"></a>
+<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
+<span class="sourceLineNo">604</span>      } catch (RemoteException e) {<a name="line.604"></a>
+<span class="sourceLineNo">605</span>        IOException ioe = e.unwrapRemoteException();<a name="line.605"></a>
+<span class="sourceLineNo">606</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.606"></a>
+<span class="sourceLineNo">607</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.607"></a>
+<span class="sourceLineNo">608</span>          return;<a name="line.608"></a>
+<span class="sourceLineNo">609</span>        } else {<a name="line.609"></a>
+<span class="sourceLineNo">610</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.610"></a>
+<span class="sourceLineNo">611</span>        }<a name="line.611"></a>
+<span class="sourceLineNo">612</span>      } catch (Exception e) {<a name="line.612"></a>
+<span class="sourceLineNo">613</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.613"></a>
+<span class="sourceLineNo">614</span>      }<a name="line.614"></a>
+<span class="sourceLineNo">615</span>      sleepIgnoreInterrupt(retry);<a name="line.615"></a>
+<span class="sourceLineNo">616</span>    }<a name="line.616"></a>
+<span class="sourceLineNo">617</span>  }<a name="line.617"></a>
+<span class="sourceLineNo">618</span><a name="line.618"></a>
+<span class="sourceLineNo">619</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.619"></a>
+<span class="sourceLineNo">620</span>    try {<a name="line.620"></a>
+<span class="sourceLineNo">621</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.621"></a>
+<span class="sourceLineNo">622</span>    } catch (InterruptedException e) {<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    }<a name="line.623"></a>
+<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
+<span class="sourceLineNo">625</span>}<a name="line.625"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
deleted file mode 100644
index eb6a26e..0000000
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.ChecksumCreater.html
+++ /dev/null
@@ -1,969 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<html lang="en">
-<head>
-<title>Source code</title>
-<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
-</head>
-<body>
-<div class="sourceContainer">
-<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a>
-<span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a>
-<span class="sourceLineNo">003</span> * or more contributor license agreements.  See the NOTICE file<a name="line.3"></a>
-<span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a>
-<span class="sourceLineNo">005</span> * regarding copyright ownership.  The ASF licenses this file<a name="line.5"></a>
-<span class="sourceLineNo">006</span> * to you under the Apache License, Version 2.0 (the<a name="line.6"></a>
-<span class="sourceLineNo">007</span> * "License"); you may not use this file except in compliance<a name="line.7"></a>
-<span class="sourceLineNo">008</span> * with the License.  You may obtain a copy of the License at<a name="line.8"></a>
-<span class="sourceLineNo">009</span> *<a name="line.9"></a>
-<span class="sourceLineNo">010</span> *     http://www.apache.org/licenses/LICENSE-2.0<a name="line.10"></a>
-<span class="sourceLineNo">011</span> *<a name="line.11"></a>
-<span class="sourceLineNo">012</span> * Unless required by applicable law or agreed to in writing, software<a name="line.12"></a>
-<span class="sourceLineNo">013</span> * distributed under the License is distributed on an "AS IS" BASIS,<a name="line.13"></a>
-<span class="sourceLineNo">014</span> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<a name="line.14"></a>
-<span class="sourceLineNo">015</span> * See the License for the specific language governing permissions and<a name="line.15"></a>
-<span class="sourceLineNo">016</span> * limitations under the License.<a name="line.16"></a>
-<span class="sourceLineNo">017</span> */<a name="line.17"></a>
-<span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.io.asyncfs;<a name="line.18"></a>
-<span class="sourceLineNo">019</span><a name="line.19"></a>
-<span class="sourceLineNo">020</span>import static org.apache.hadoop.fs.CreateFlag.CREATE;<a name="line.20"></a>
-<span class="sourceLineNo">021</span>import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;<a name="line.21"></a>
-<span class="sourceLineNo">022</span>import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createEncryptor;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.trySaslNegotiate;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import static org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage.PIPELINE_SETUP_CREATE;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import static org.apache.hbase.thirdparty.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import static org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE;<a name="line.29"></a>
-<span class="sourceLineNo">030</span><a name="line.30"></a>
-<span class="sourceLineNo">031</span>import com.google.protobuf.CodedOutputStream;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import java.io.IOException;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import java.io.InterruptedIOException;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import java.lang.reflect.InvocationTargetException;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import java.lang.reflect.Method;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import java.util.ArrayList;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import java.util.EnumSet;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import java.util.List;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import java.util.concurrent.TimeUnit;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.conf.Configuration;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.crypto.CryptoProtocolVersion;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.crypto.Encryptor;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.fs.CreateFlag;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.fs.FileSystem;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.fs.FileSystemLinkResolver;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.fs.Path;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.fs.UnresolvedLinkException;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hdfs.DFSOutputStream;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hdfs.protocol.ClientProtocol;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hdfs.protocol.DatanodeInfo;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hdfs.protocol.ExtendedBlock;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hdfs.protocol.LocatedBlock;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hdfs.protocol.datatransfer.Op;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.io.EnumSetWritable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.net.NetUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.security.token.Token;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.util.DataChecksum;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.slf4j.Logger;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.slf4j.LoggerFactory;<a name="line.87"></a>
-<span class="sourceLineNo">088</span><a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.io.netty.bootstrap.Bootstrap;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufAllocator;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufOutputStream;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.io.netty.buffer.PooledByteBufAllocator;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.io.netty.channel.Channel;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelFuture;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelFutureListener;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandler;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelInitializer;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelPipeline;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hbase.thirdparty.io.netty.channel.EventLoop;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufDecoder;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateEvent;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateHandler;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.Future;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.FutureListener;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise;<a name="line.112"></a>
-<span class="sourceLineNo">113</span><a name="line.113"></a>
-<span class="sourceLineNo">114</span>/**<a name="line.114"></a>
-<span class="sourceLineNo">115</span> * Helper class for implementing {@link FanOutOneBlockAsyncDFSOutput}.<a name="line.115"></a>
-<span class="sourceLineNo">116</span> */<a name="line.116"></a>
-<span class="sourceLineNo">117</span>@InterfaceAudience.Private<a name="line.117"></a>
-<span class="sourceLineNo">118</span>public final class FanOutOneBlockAsyncDFSOutputHelper {<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  private static final Logger LOG =<a name="line.119"></a>
-<span class="sourceLineNo">120</span>      LoggerFactory.getLogger(FanOutOneBlockAsyncDFSOutputHelper.class);<a name="line.120"></a>
-<span class="sourceLineNo">121</span><a name="line.121"></a>
-<span class="sourceLineNo">122</span>  private FanOutOneBlockAsyncDFSOutputHelper() {<a name="line.122"></a>
-<span class="sourceLineNo">123</span>  }<a name="line.123"></a>
-<span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>  public static final String ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES = "hbase.fs.async.create.retries";<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  public static final int DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES = 10;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  // use pooled allocator for performance.<a name="line.128"></a>
-<span class="sourceLineNo">129</span>  private static final ByteBufAllocator ALLOC = PooledByteBufAllocator.DEFAULT;<a name="line.129"></a>
-<span class="sourceLineNo">130</span><a name="line.130"></a>
-<span class="sourceLineNo">131</span>  // copied from DFSPacket since it is package private.<a name="line.131"></a>
-<span class="sourceLineNo">132</span>  public static final long HEART_BEAT_SEQNO = -1L;<a name="line.132"></a>
-<span class="sourceLineNo">133</span><a name="line.133"></a>
-<span class="sourceLineNo">134</span>  // Timeouts for communicating with DataNode for streaming writes/reads<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  public static final int READ_TIMEOUT = 60 * 1000;<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>  private static final DatanodeInfo[] EMPTY_DN_ARRAY = new DatanodeInfo[0];<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>  // helper class for getting Status from PipelineAckProto. In hadoop 2.6 or before, there is a<a name="line.139"></a>
-<span class="sourceLineNo">140</span>  // getStatus method, and for hadoop 2.7 or after, the status is retrieved from flag. The flag may<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  // get from proto directly, or combined by the reply field of the proto and a ECN object. See<a name="line.141"></a>
-<span class="sourceLineNo">142</span>  // createPipelineAckStatusGetter for more details.<a name="line.142"></a>
-<span class="sourceLineNo">143</span>  private interface PipelineAckStatusGetter {<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    Status get(PipelineAckProto ack);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  private static final PipelineAckStatusGetter PIPELINE_ACK_STATUS_GETTER;<a name="line.147"></a>
-<span class="sourceLineNo">148</span><a name="line.148"></a>
-<span class="sourceLineNo">149</span>  // StorageType enum is placed under o.a.h.hdfs in hadoop 2.6 and o.a.h.fs in hadoop 2.7. So here<a name="line.149"></a>
-<span class="sourceLineNo">150</span>  // we need to use reflection to set it.See createStorageTypeSetter for more details.<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  private interface StorageTypeSetter {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    OpWriteBlockProto.Builder set(OpWriteBlockProto.Builder builder, Enum&lt;?&gt; storageType);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>  private static final StorageTypeSetter STORAGE_TYPE_SETTER;<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>  // helper class for calling add block method on namenode. There is a addBlockFlags parameter for<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  // hadoop 2.8 or later. See createBlockAdder for more details.<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  private interface BlockAdder {<a name="line.159"></a>
-<span class="sourceLineNo">160</span><a name="line.160"></a>
-<span class="sourceLineNo">161</span>    LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,<a name="line.161"></a>
-<span class="sourceLineNo">162</span>        ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId, String[] favoredNodes)<a name="line.162"></a>
-<span class="sourceLineNo">163</span>        throws IOException;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  }<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>  private static final BlockAdder BLOCK_ADDER;<a name="line.166"></a>
-<span class="sourceLineNo">167</span><a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private interface LeaseManager {<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>    void begin(DFSClient client, long inodeId);<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>    void end(DFSClient client, long inodeId);<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>  private static final LeaseManager LEASE_MANAGER;<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>  // This is used to terminate a recoverFileLease call when FileSystem is already closed.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  // isClientRunning is not public so we need to use reflection.<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  private interface DFSClientAdaptor {<a name="line.179"></a>
-<span class="sourceLineNo">180</span><a name="line.180"></a>
-<span class="sourceLineNo">181</span>    boolean isClientRunning(DFSClient client);<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  }<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>  private static final DFSClientAdaptor DFS_CLIENT_ADAPTOR;<a name="line.184"></a>
-<span class="sourceLineNo">185</span><a name="line.185"></a>
-<span class="sourceLineNo">186</span>  // helper class for convert protos.<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  private interface PBHelper {<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>    ExtendedBlockProto convert(ExtendedBlock b);<a name="line.189"></a>
-<span class="sourceLineNo">190</span><a name="line.190"></a>
-<span class="sourceLineNo">191</span>    TokenProto convert(Token&lt;?&gt; tok);<a name="line.191"></a>
-<span class="sourceLineNo">192</span>  }<a name="line.192"></a>
-<span class="sourceLineNo">193</span><a name="line.193"></a>
-<span class="sourceLineNo">194</span>  private static final PBHelper PB_HELPER;<a name="line.194"></a>
-<span class="sourceLineNo">195</span><a name="line.195"></a>
-<span class="sourceLineNo">196</span>  // helper class for creating data checksum.<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private interface ChecksumCreater {<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    DataChecksum createChecksum(DFSClient client);<a name="line.198"></a>
-<span class="sourceLineNo">199</span>  }<a name="line.199"></a>
-<span class="sourceLineNo">200</span><a name="line.200"></a>
-<span class="sourceLineNo">201</span>  private static final ChecksumCreater CHECKSUM_CREATER;<a name="line.201"></a>
-<span class="sourceLineNo">202</span><a name="line.202"></a>
-<span class="sourceLineNo">203</span>  // helper class for creating files.<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  private interface FileCreator {<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    default HdfsFileStatus create(ClientProtocol instance, String src, FsPermission masked,<a name="line.205"></a>
-<span class="sourceLineNo">206</span>        String clientName, EnumSetWritable&lt;CreateFlag&gt; flag, boolean createParent,<a name="line.206"></a>
-<span class="sourceLineNo">207</span>        short replication, long blockSize, CryptoProtocolVersion[] supportedVersions)<a name="line.207"></a>
-<span class="sourceLineNo">208</span>        throws Exception {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      try {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        return (HdfsFileStatus) createObject(instance, src, masked, clientName, flag, createParent,<a name="line.210"></a>
-<span class="sourceLineNo">211</span>          replication, blockSize, supportedVersions);<a name="line.211"></a>
-<span class="sourceLineNo">212</span>      } catch (InvocationTargetException e) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        if (e.getCause() instanceof Exception) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>          throw (Exception) e.getCause();<a name="line.214"></a>
-<span class="sourceLineNo">215</span>        } else {<a name="line.215"></a>
-<span class="sourceLineNo">216</span>          throw new RuntimeException(e.getCause());<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        }<a name="line.217"></a>
-<span class="sourceLineNo">218</span>      }<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    }<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>    Object createObject(ClientProtocol instance, String src, FsPermission masked, String clientName,<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        EnumSetWritable&lt;CreateFlag&gt; flag, boolean createParent, short replication, long blockSize,<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        CryptoProtocolVersion[] supportedVersions) throws Exception;<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  }<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>  private static final FileCreator FILE_CREATOR;<a name="line.226"></a>
-<span class="sourceLineNo">227</span><a name="line.227"></a>
-<span class="sourceLineNo">228</span>  private static DFSClientAdaptor createDFSClientAdaptor() throws NoSuchMethodException {<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    Method isClientRunningMethod = DFSClient.class.getDeclaredMethod("isClientRunning");<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    isClientRunningMethod.setAccessible(true);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    return new DFSClientAdaptor() {<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>      @Override<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      public boolean isClientRunning(DFSClient client) {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        try {<a name="line.235"></a>
-<span class="sourceLineNo">236</span>          return (Boolean) isClientRunningMethod.invoke(client);<a name="line.236"></a>
-<span class="sourceLineNo">237</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.237"></a>
-<span class="sourceLineNo">238</span>          throw new RuntimeException(e);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>        }<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      }<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    };<a name="line.241"></a>
-<span class="sourceLineNo">242</span>  }<a name="line.242"></a>
-<span class="sourceLineNo">243</span><a name="line.243"></a>
-<span class="sourceLineNo">244</span>  private static LeaseManager createLeaseManager() throws NoSuchMethodException {<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    Method beginFileLeaseMethod =<a name="line.245"></a>
-<span class="sourceLineNo">246</span>        DFSClient.class.getDeclaredMethod("beginFileLease", long.class, DFSOutputStream.class);<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    beginFileLeaseMethod.setAccessible(true);<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    Method endFileLeaseMethod = DFSClient.class.getDeclaredMethod("endFileLease", long.class);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    endFileLeaseMethod.setAccessible(true);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    return new LeaseManager() {<a name="line.250"></a>
-<span class="sourceLineNo">251</span><a name="line.251"></a>
-<span class="sourceLineNo">252</span>      @Override<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      public void begin(DFSClient client, long inodeId) {<a name="line.253"></a>
-<span class="sourceLineNo">254</span>        try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          beginFileLeaseMethod.invoke(client, inodeId, null);<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          throw new RuntimeException(e);<a name="line.257"></a>
-<span class="sourceLineNo">258</span>        }<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      }<a name="line.259"></a>
-<span class="sourceLineNo">260</span><a name="line.260"></a>
-<span class="sourceLineNo">261</span>      @Override<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      public void end(DFSClient client, long inodeId) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>        try {<a name="line.263"></a>
-<span class="sourceLineNo">264</span>          endFileLeaseMethod.invoke(client, inodeId);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>          throw new RuntimeException(e);<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        }<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      }<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    };<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private static PipelineAckStatusGetter createPipelineAckStatusGetter27()<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      throws NoSuchMethodException {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    Method getFlagListMethod = PipelineAckProto.class.getMethod("getFlagList");<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    @SuppressWarnings("rawtypes")<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    Class&lt;? extends Enum&gt; ecnClass;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    try {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      ecnClass = Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck$ECN")<a name="line.278"></a>
-<span class="sourceLineNo">279</span>          .asSubclass(Enum.class);<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    } catch (ClassNotFoundException e) {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      String msg = "Couldn't properly initialize the PipelineAck.ECN class. Please " +<a name="line.281"></a>
-<span class="sourceLineNo">282</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.282"></a>
-<span class="sourceLineNo">283</span>          "HBASE-16110 for more information.";<a name="line.283"></a>
-<span class="sourceLineNo">284</span>      LOG.error(msg, e);<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      throw new Error(msg, e);<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    @SuppressWarnings("unchecked")<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    Enum&lt;?&gt; disabledECN = Enum.valueOf(ecnClass, "DISABLED");<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    Method getReplyMethod = PipelineAckProto.class.getMethod("getReply", int.class);<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    Method combineHeaderMethod =<a name="line.290"></a>
-<span class="sourceLineNo">291</span>        PipelineAck.class.getMethod("combineHeader", ecnClass, Status.class);<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    Method getStatusFromHeaderMethod =<a name="line.292"></a>
-<span class="sourceLineNo">293</span>        PipelineAck.class.getMethod("getStatusFromHeader", int.class);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return new PipelineAckStatusGetter() {<a name="line.294"></a>
-<span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>      @Override<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      public Status get(PipelineAckProto ack) {<a name="line.297"></a>
-<span class="sourceLineNo">298</span>        try {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>          @SuppressWarnings("unchecked")<a name="line.299"></a>
-<span class="sourceLineNo">300</span>          List&lt;Integer&gt; flagList = (List&lt;Integer&gt;) getFlagListMethod.invoke(ack);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>          Integer headerFlag;<a name="line.301"></a>
-<span class="sourceLineNo">302</span>          if (flagList.isEmpty()) {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>            Status reply = (Status) getReplyMethod.invoke(ack, 0);<a name="line.303"></a>
-<span class="sourceLineNo">304</span>            headerFlag = (Integer) combineHeaderMethod.invoke(null, disabledECN, reply);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>          } else {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>            headerFlag = flagList.get(0);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>          }<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          return (Status) getStatusFromHeaderMethod.invoke(null, headerFlag);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          throw new RuntimeException(e);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>        }<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    };<a name="line.313"></a>
-<span class="sourceLineNo">314</span>  }<a name="line.314"></a>
-<span class="sourceLineNo">315</span><a name="line.315"></a>
-<span class="sourceLineNo">316</span>  private static PipelineAckStatusGetter createPipelineAckStatusGetter26()<a name="line.316"></a>
-<span class="sourceLineNo">317</span>      throws NoSuchMethodException {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    Method getStatusMethod = PipelineAckProto.class.getMethod("getStatus", int.class);<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    return new PipelineAckStatusGetter() {<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>      @Override<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      public Status get(PipelineAckProto ack) {<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        try {<a name="line.323"></a>
-<span class="sourceLineNo">324</span>          return (Status) getStatusMethod.invoke(ack, 0);<a name="line.324"></a>
-<span class="sourceLineNo">325</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>          throw new RuntimeException(e);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>        }<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      }<a name="line.328"></a>
-<span class="sourceLineNo">329</span>    };<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  }<a name="line.330"></a>
-<span class="sourceLineNo">331</span><a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private static PipelineAckStatusGetter createPipelineAckStatusGetter()<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      throws NoSuchMethodException {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    try {<a name="line.334"></a>
-<span class="sourceLineNo">335</span>      return createPipelineAckStatusGetter27();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    } catch (NoSuchMethodException e) {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>      LOG.debug("Can not get expected method " + e.getMessage() +<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          ", this usually because your Hadoop is pre 2.7.0, " +<a name="line.338"></a>
-<span class="sourceLineNo">339</span>          "try the methods in Hadoop 2.6.x instead.");<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    }<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    return createPipelineAckStatusGetter26();<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  }<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private static StorageTypeSetter createStorageTypeSetter() throws NoSuchMethodException {<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    Method setStorageTypeMethod =<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        OpWriteBlockProto.Builder.class.getMethod("setStorageType", StorageTypeProto.class);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    ImmutableMap.Builder&lt;String, StorageTypeProto&gt; builder = ImmutableMap.builder();<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    for (StorageTypeProto storageTypeProto : StorageTypeProto.values()) {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      builder.put(storageTypeProto.name(), storageTypeProto);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    }<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    ImmutableMap&lt;String, StorageTypeProto&gt; name2ProtoEnum = builder.build();<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    return new StorageTypeSetter() {<a name="line.352"></a>
-<span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>      @Override<a name="line.354"></a>
-<span class="sourceLineNo">355</span>      public OpWriteBlockProto.Builder set(OpWriteBlockProto.Builder builder, Enum&lt;?&gt; storageType) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>        Object protoEnum = name2ProtoEnum.get(storageType.name());<a name="line.356"></a>
-<span class="sourceLineNo">357</span>        try {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>          setStorageTypeMethod.invoke(builder, protoEnum);<a name="line.358"></a>
-<span class="sourceLineNo">359</span>        } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {<a name="line.359"></a>
-<span class="sourceLineNo">360</span>          throw new RuntimeException(e);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>        }<a name="line.361"></a>
-<span class="sourceLineNo">362</span>        return builder;<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      }<a name="line.363"></a>
-<span class="sourceLineNo">364</span>    };<a name="line.364"></a>
-<span class="sourceLineNo">365</span>  }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>  private static BlockAdder createBlockAdder() throws NoSuchMethodException {<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    for (Method method : ClientProtocol.class.getMethods()) {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      if (method.getName().equals("addBlock")) {<a name="line.369"></a>
-<span class="sourceLineNo">370</span>        Method addBlockMethod = method;<a name="line.370"></a>
-<span class="sourceLineNo">371</span>        Class&lt;?&gt;[] paramTypes = addBlockMethod.getParameterTypes();<a name="line.371"></a>
-<span class="sourceLineNo">372</span>        if (paramTypes[paramTypes.length - 1] == String[].class) {<a name="line.372"></a>
-<span class="sourceLineNo">373</span>          return new BlockAdder() {<a name="line.373"></a>
-<span class="sourceLineNo">374</span><a name="line.374"></a>
-<span class="sourceLineNo">375</span>            @Override<a name="line.375"></a>
-<span class="sourceLineNo">376</span>            public LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>                ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId,<a name="line.377"></a>
-<span class="sourceLineNo">378</span>                String[] favoredNodes) throws IOException {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>              try {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>                return (LocatedBlock) addBlockMethod.invoke(namenode, src, clientName, previous,<a name="line.380"></a>
-<span class="sourceLineNo">381</span>                  excludeNodes, fileId, favoredNodes);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>              } catch (IllegalAccessException e) {<a name="line.382"></a>
-<span class="sourceLineNo">383</span>                throw new RuntimeException(e);<a name="line.383"></a>
-<span class="sourceLineNo">384</span>              } catch (InvocationTargetException e) {<a name="line.384"></a>
-<span class="sourceLineNo">385</span>                Throwables.propagateIfPossible(e.getTargetException(), IOException.class);<a name="line.385"></a>
-<span class="sourceLineNo">386</span>                throw new RuntimeException(e);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>              }<a name="line.387"></a>
-<span class="sourceLineNo">388</span>            }<a name="line.388"></a>
-<span class="sourceLineNo">389</span>          };<a name="line.389"></a>
-<span class="sourceLineNo">390</span>        } else {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>          return new BlockAdder() {<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>            @Override<a name="line.393"></a>
-<span class="sourceLineNo">394</span>            public LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,<a name="line.394"></a>
-<span class="sourceLineNo">395</span>                ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId,<a name="line.395"></a>
-<span class="sourceLineNo">396</span>                String[] favoredNodes) throws IOException {<a name="line.396"></a>
-<span class="sourceLineNo">397</span>              try {<a name="line.397"></a>
-<span class="sourceLineNo">398</span>                return (LocatedBlock) addBlockMethod.invoke(namenode, src, clientName, previous,<a name="line.398"></a>
-<span class="sourceLineNo">399</span>                  excludeNodes, fileId, favoredNodes, null);<a name="line.399"></a>
-<span class="sourceLineNo">400</span>              } catch (IllegalAccessException e) {<a name="line.400"></a>
-<span class="sourceLineNo">401</span>                throw new RuntimeException(e);<a name="line.401"></a>
-<span class="sourceLineNo">402</span>              } catch (InvocationTargetException e) {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>                Throwables.propagateIfPossible(e.getTargetException(), IOException.class);<a name="line.403"></a>
-<span class="sourceLineNo">404</span>                throw new RuntimeException(e);<a name="line.404"></a>
-<span class="sourceLineNo">405</span>              }<a name="line.405"></a>
-<span class="sourceLineNo">406</span>            }<a name="line.406"></a>
-<span class="sourceLineNo">407</span>          };<a name="line.407"></a>
-<span class="sourceLineNo">408</span>        }<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      }<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    }<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    throw new NoSuchMethodException("Can not find addBlock method in ClientProtocol");<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  private static PBHelper createPBHelper() throws NoSuchMethodException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    Class&lt;?&gt; helperClass;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    String clazzName = "org.apache.hadoop.hdfs.protocolPB.PBHelperClient";<a name="line.416"></a>
-<span class="sourceLineNo">417</span>    try {<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      helperClass = Class.forName(clazzName);<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    } catch (ClassNotFoundException e) {<a name="line.419"></a>
-<span class="sourceLineNo">420</span>      helperClass = org.apache.hadoop.hdfs.protocolPB.PBHelper.class;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      LOG.debug("" + clazzName + " not found (Hadoop is pre-2.8.0?); using " +<a name="line.421"></a>
-<span class="sourceLineNo">422</span>          helperClass.toString() + " instead.");<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    }<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    Method convertEBMethod = helperClass.getMethod("convert", ExtendedBlock.class);<a name="line.424"></a>
-<span class="sourceLineNo">425</span>    Method convertTokenMethod = helperClass.getMethod("convert", Token.class);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    return new PBHelper() {<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>      @Override<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      public ExtendedBlockProto convert(ExtendedBlock b) {<a name="line.429"></a>
-<span class="sourceLineNo">430</span>        try {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>          return (ExtendedBlockProto) convertEBMethod.invoke(null, b);<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.432"></a>
-<span class="sourceLineNo">433</span>          throw new RuntimeException(e);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>        }<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      }<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>      @Override<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      public TokenProto convert(Token&lt;?&gt; tok) {<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        try {<a name="line.439"></a>
-<span class="sourceLineNo">440</span>          return (TokenProto) convertTokenMethod.invoke(null, tok);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>          throw new RuntimeException(e);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        }<a name="line.443"></a>
-<span class="sourceLineNo">444</span>      }<a name="line.444"></a>
-<span class="sourceLineNo">445</span>    };<a name="line.445"></a>
-<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>  private static ChecksumCreater createChecksumCreater28(Method getConfMethod, Class&lt;?&gt; confClass)<a name="line.448"></a>
-<span class="sourceLineNo">449</span>      throws NoSuchMethodException {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    for (Method method : confClass.getMethods()) {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      if (method.getName().equals("createChecksum")) {<a name="line.451"></a>
-<span class="sourceLineNo">452</span>        Method createChecksumMethod = method;<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        return new ChecksumCreater() {<a name="line.453"></a>
-<span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>          @Override<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          public DataChecksum createChecksum(DFSClient client) {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>            try {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>              return (DataChecksum) createChecksumMethod.invoke(getConfMethod.invoke(client),<a name="line.458"></a>
-<span class="sourceLineNo">459</span>                (Object) null);<a name="line.459"></a>
-<span class="sourceLineNo">460</span>            } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.460"></a>
-<span class="sourceLineNo">461</span>              throw new RuntimeException(e);<a name="line.461"></a>
-<span class="sourceLineNo">462</span>            }<a name="line.462"></a>
-<span class="sourceLineNo">463</span>          }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>        };<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      }<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    }<a name="line.466"></a>
-<span class="sourceLineNo">467</span>    throw new NoSuchMethodException("Can not find createChecksum method in DfsClientConf");<a name="line.467"></a>
-<span class="sourceLineNo">468</span>  }<a name="line.468"></a>
-<span class="sourceLineNo">469</span><a name="line.469"></a>
-<span class="sourceLineNo">470</span>  private static ChecksumCreater createChecksumCreater27(Method getConfMethod, Class&lt;?&gt; confClass)<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      throws NoSuchMethodException {<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    Method createChecksumMethod = confClass.getDeclaredMethod("createChecksum");<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    createChecksumMethod.setAccessible(true);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    return new ChecksumCreater() {<a name="line.474"></a>
-<span class="sourceLineNo">475</span><a name="line.475"></a>
-<span class="sourceLineNo">476</span>      @Override<a name="line.476"></a>
-<span class="sourceLineNo">477</span>      public DataChecksum createChecksum(DFSClient client) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>        try {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>          return (DataChecksum) createChecksumMethod.invoke(getConfMethod.invoke(client));<a name="line.479"></a>
-<span class="sourceLineNo">480</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          throw new RuntimeException(e);<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      }<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    };<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  }<a name="line.485"></a>
-<span class="sourceLineNo">486</span><a name="line.486"></a>
-<span class="sourceLineNo">487</span>  private static ChecksumCreater createChecksumCreater()<a name="line.487"></a>
-<span class="sourceLineNo">488</span>      throws NoSuchMethodException, ClassNotFoundException {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    Method getConfMethod = DFSClient.class.getMethod("getConf");<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    try {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      return createChecksumCreater28(getConfMethod,<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        Class.forName("org.apache.hadoop.hdfs.client.impl.DfsClientConf"));<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    } catch (ClassNotFoundException e) {<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      LOG.debug("No DfsClientConf class found, should be hadoop 2.7-", e);<a name="line.494"></a>
-<span class="sourceLineNo">495</span>    }<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    return createChecksumCreater27(getConfMethod,<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      Class.forName("org.apache.hadoop.hdfs.DFSClient$Conf"));<a name="line.497"></a>
-<span class="sourceLineNo">498</span>  }<a name="line.498"></a>
-<span class="sourceLineNo">499</span><a name="line.499"></a>
-<span class="sourceLineNo">500</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      CryptoProtocolVersion[].class, String.class);<a name="line.503"></a>
-<span class="sourceLineNo">504</span><a name="line.504"></a>
-<span class="sourceLineNo">505</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        supportedVersions) -&gt; {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.507"></a>
-<span class="sourceLineNo">508</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    };<a name="line.509"></a>
-<span class="sourceLineNo">510</span>  }<a name="line.510"></a>
-<span class="sourceLineNo">511</span><a name="line.511"></a>
-<span class="sourceLineNo">512</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      CryptoProtocolVersion[].class);<a name="line.515"></a>
-<span class="sourceLineNo">516</span><a name="line.516"></a>
-<span class="sourceLineNo">517</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.517"></a>
-<span class="sourceLineNo">518</span>        supportedVersions) -&gt; {<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.519"></a>
-<span class="sourceLineNo">520</span>        createParent, replication, blockSize, supportedVersions);<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    };<a name="line.521"></a>
-<span class="sourceLineNo">522</span>  }<a name="line.522"></a>
-<span class="sourceLineNo">523</span><a name="line.523"></a>
-<span class="sourceLineNo">524</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>    try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>      return createFileCreator3();<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    } catch (NoSuchMethodException e) {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    return createFileCreator2();<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  }<a name="line.531"></a>
-<span class="sourceLineNo">532</span><a name="line.532"></a>
-<span class="sourceLineNo">533</span>  // cancel the processing if DFSClient is already closed.<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>    private final DFSClient client;<a name="line.536"></a>
-<span class="sourceLineNo">537</span><a name="line.537"></a>
-<span class="sourceLineNo">538</span>    public CancelOnClose(DFSClient client) {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>      this.client = client;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    }<a name="line.540"></a>
-<span class="sourceLineNo">541</span><a name="line.541"></a>
-<span class="sourceLineNo">542</span>    @Override<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    public boolean progress() {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  static {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    try {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      PIPELINE_ACK_STATUS_GETTER = createPipelineAckStatusGetter();<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      STORAGE_TYPE_SETTER = createStorageTypeSetter();<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      BLOCK_ADDER = createBlockAdder();<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      LEASE_MANAGER = createLeaseManager();<a name="line.553"></a>
-<span class="sourceLineNo">554</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      PB_HELPER = createPBHelper();<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      CHECKSUM_CREATER = createChecksumCreater();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>      FILE_CREATOR = createFileCreator();<a name="line.557"></a>
-<span class="sourceLineNo">558</span>    } catch (Exception e) {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.559"></a>
-<span class="sourceLineNo">560</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.560"></a>
-<span class="sourceLineNo">561</span>          "HBASE-16110 for more information.";<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      LOG.error(msg, e);<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      throw new Error(msg, e);<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    }<a name="line.564"></a>
-<span class="sourceLineNo">565</span>  }<a name="line.565"></a>
-<span class="sourceLineNo">566</span><a name="line.566"></a>
-<span class="sourceLineNo">567</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.568"></a>
-<span class="sourceLineNo">569</span>  }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.572"></a>
-<span class="sourceLineNo">573</span>  }<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    return CHECKSUM_CREATER.createChecksum(client);<a name="line.576"></a>
-<span class="sourceLineNo">577</span>  }<a name="line.577"></a>
-<span class="sourceLineNo">578</span><a name="line.578"></a>
-<span class="sourceLineNo">579</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    return PIPELINE_ACK_STATUS_GETTER.get(ack);<a name="line.580"></a>
-<span class="sourceLineNo">581</span>  }<a name="line.581"></a>
-<span class="sourceLineNo">582</span><a name="line.582"></a>
-<span class="sourceLineNo">583</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.583"></a>
-<span class="sourceLineNo">584</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.584"></a>
-<span class="sourceLineNo">585</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      new ProtobufVarint32FrameDecoder(),<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>        @Override<a name="line.590"></a>
-<span class="sourceLineNo">591</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>            throws Exception {<a name="line.592"></a>
-<span class="sourceLineNo">593</span>          Status pipelineStatus = resp.getStatus();<a name="line.593"></a>
-<span class="sourceLineNo">594</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.595"></a>
-<span class="sourceLineNo">596</span>          }<a name="line.596"></a>
-<span class="sourceLineNo">597</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.600"></a>
-<span class="sourceLineNo">601</span>                  resp.getMessage() + ", " + logInfo);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>            } else {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.603"></a>
-<span class="sourceLineNo">604</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>            }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>          }<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          // success<a name="line.607"></a>
-<span class="sourceLineNo">608</span>          ChannelPipeline p = ctx.pipeline();<a name="line.608"></a>
-<span class="sourceLineNo">609</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.610"></a>
-<span class="sourceLineNo">611</span>            // of pipeline.<a name="line.611"></a>
-<span class="sourceLineNo">612</span>            if (handler instanceof IdleStateHandler) {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>              break;<a name="line.613"></a>
-<span class="sourceLineNo">614</span>            }<a name="line.614"></a>
-<span class="sourceLineNo">615</span>          }<a name="line.615"></a>
-<span class="sourceLineNo">616</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.616"></a>
-<span class="sourceLineNo">617</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.617"></a>
-<span class="sourceLineNo">618</span>          ctx.channel().config().setAutoRead(false);<a name="line.618"></a>
-<span class="sourceLineNo">619</span>          promise.trySuccess(ctx.channel());<a name="line.619"></a>
-<span class="sourceLineNo">620</span>        }<a name="line.620"></a>
-<span class="sourceLineNo">621</span><a name="line.621"></a>
-<span class="sourceLineNo">622</span>        @Override<a name="line.622"></a>
-<span class="sourceLineNo">623</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.624"></a>
-<span class="sourceLineNo">625</span>        }<a name="line.625"></a>
-<span class="sourceLineNo">626</span><a name="line.626"></a>
-<span class="sourceLineNo">627</span>        @Override<a name="line.627"></a>
-<span class="sourceLineNo">628</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.628"></a>
-<span class="sourceLineNo">629</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.629"></a>
-<span class="sourceLineNo">630</span>            promise<a name="line.630"></a>
-<span class="sourceLineNo">631</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          } else {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>            super.userEventTriggered(ctx, evt);<a name="line.633"></a>
-<span class="sourceLineNo">634</span>          }<a name="line.634"></a>
-<span class="sourceLineNo">635</span>        }<a name="line.635"></a>
-<span class="sourceLineNo">636</span><a name="line.636"></a>
-<span class="sourceLineNo">637</span>        @Override<a name="line.637"></a>
-<span class="sourceLineNo">638</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.638"></a>
-<span class="sourceLineNo">639</span>          promise.tryFailure(cause);<a name="line.639"></a>
-<span class="sourceLineNo">640</span>        }<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      });<a name="line.641"></a>
-<span class="sourceLineNo">642</span>  }<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>  private static void requestWriteBlock(Channel channel, Enum&lt;?&gt; storageType,<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.645"></a>
-<span class="sourceLineNo">646</span>    OpWriteBlockProto proto = STORAGE_TYPE_SETTER.set(writeBlockProtoBuilder, storageType).build();<a name="line.646"></a>
-<span class="sourceLineNo">647</span>    int protoLen = proto.getSerializedSize();<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    ByteBuf buffer =<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.650"></a>
-<span class="sourceLineNo">651</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.651"></a>
-<span class="sourceLineNo">652</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    channel.writeAndFlush(buffer);<a name="line.653"></a>
-<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.656"></a>
-<span class="sourceLineNo">657</span>      Enum&lt;?&gt; storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      throws IOException {<a name="line.659"></a>
-<span class="sourceLineNo">660</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.662"></a>
-<span class="sourceLineNo">663</span><a name="line.663"></a>
-<span class="sourceLineNo">664</span>      @Override<a name="line.664"></a>
-<span class="sourceLineNo">665</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        if (future.isSuccess()) {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>          // setup response processing pipeline first, then send request.<a name="line.667"></a>
-<span class="sourceLineNo">668</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.669"></a>
-<span class="sourceLineNo">670</span>        } else {<a name="line.670"></a>
-<span class="sourceLineNo">671</span>          promise.tryFailure(future.cause());<a name="line.671"></a>
-<span class="sourceLineNo">672</span>        }<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    });<a name="line.674"></a>
-<span class="sourceLineNo">675</span>  }<a name="line.675"></a>
-<span class="sourceLineNo">676</span><a name="line.676"></a>
-<span class="sourceLineNo">677</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.677"></a>
-<span class="sourceLineNo">678</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.678"></a>
-<span class="sourceLineNo">679</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    Enum&lt;?&gt;[] storageTypes = locatedBlock.getStorageTypes();<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.682"></a>
-<span class="sourceLineNo">683</span>    boolean connectToDnViaHostname =<a name="line.683"></a>
-<span class="sourceLineNo">684</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.688"></a>
-<span class="sourceLineNo">689</span>        .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PB_HELPER.convert(blockCopy))<a name="line.689"></a>
-<span class="sourceLineNo">690</span>            .setToken(PB_HELPER.convert(locatedBlock.getBlockToken())))<a name="line.690"></a>
-<span class="sourceLineNo">691</span>        .setClientName(clientName).build();<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.693"></a>
-<span class="sourceLineNo">694</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.694"></a>
-<span class="sourceLineNo">695</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.695"></a>
-<span class="sourceLineNo">696</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.696"></a>
-<span class="sourceLineNo">697</span>        .setRequestedChecksum(checksumProto)<a name="line.697"></a>
-<span class="sourceLineNo">698</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.700"></a>
-<span class="sourceLineNo">701</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.701"></a>
-<span class="sourceLineNo">702</span>      Enum&lt;?&gt; storageType = storageTypes[i];<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      futureList.add(promise);<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.705"></a>
-<span class="sourceLineNo">706</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.706"></a>
-<span class="sourceLineNo">707</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.707"></a>
-<span class="sourceLineNo">708</span><a name="line.708"></a>
-<span class="sourceLineNo">709</span>            @Override<a name="line.709"></a>
-<span class="sourceLineNo">710</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.710"></a>
-<span class="sourceLineNo">711</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.711"></a>
-<span class="sourceLineNo">712</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.712"></a>
-<span class="sourceLineNo">713</span>              // a null handler.<a name="line.713"></a>
-<span class="sourceLineNo">714</span>            }<a name="line.714"></a>
-<span class="sourceLineNo">715</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.715"></a>
-<span class="sourceLineNo">716</span><a name="line.716"></a>
-<span class="sourceLineNo">717</span>            @Override<a name="line.717"></a>
-<span class="sourceLineNo">718</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.718"></a>
-<span class="sourceLineNo">719</span>              if (future.isSuccess()) {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.720"></a>
-<span class="sourceLineNo">721</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.721"></a>
-<span class="sourceLineNo">722</span>              } else {<a name="line.722"></a>
-<span class="sourceLineNo">723</span>                promise.tryFailure(future.cause());<a name="line.723"></a>
-<span class="sourceLineNo">724</span>              }<a name="line.724"></a>
-<span class="sourceLineNo">725</span>            }<a name="line.725"></a>
-<span class="sourceLineNo">726</span>          });<a name="line.726"></a>
-<span class="sourceLineNo">727</span>    }<a name="line.727"></a>
-<span class="sourceLineNo">728</span>    return futureList;<a name="line.728"></a>
-<span class="sourceLineNo">729</span>  }<a name="line.729"></a>
-<span class="sourceLineNo">730</span><a name="line.730"></a>
-<span class="sourceLineNo">731</span>  /**<a name="line.731"></a>
-<span class="sourceLineNo">732</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.732"></a>
-<span class="sourceLineNo">733</span>   */<a name="line.733"></a>
-<span class="sourceLineNo">734</span>  public static class NameNodeException extends IOException {<a name="line.734"></a>
-<span class="sourceLineNo">735</span><a name="line.735"></a>
-<span class="sourceLineNo">736</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.736"></a>
-<span class="sourceLineNo">737</span><a name="line.737"></a>
-<span class="sourceLineNo">738</span>    public NameNodeException(Throwable cause) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>      super(cause);<a name="line.739"></a>
-<span class="sourceLineNo">740</span>    }<a name="line.740"></a>
-<span class="sourceLineNo">741</span>  }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.743"></a>
-<span class="sourceLineNo">744</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.744"></a>
-<span class="sourceLineNo">745</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.745"></a>
-<span class="sourceLineNo">746</span>    Configuration conf = dfs.getConf();<a name="line.746"></a>
-<span class="sourceLineNo">747</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    DFSClient client = dfs.getClient();<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    String clientName = client.getClientName();<a name="line.749"></a>
-<span class="sourceLineNo">750</span>    ClientProtocol namenode = client.getNamenode();<a name="line.750"></a>
-<span class="sourceLineNo">751</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.751"></a>
-<span class="sourceLineNo">752</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.752"></a>
-<span class="sourceLineNo">753</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    for (int retry = 0;; retry++) {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>      HdfsFileStatus stat;<a name="line.755"></a>
-<span class="sourceLineNo">756</span>      try {<a name="line.756"></a>
-<span class="sourceLineNo">757</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.757"></a>
-<span class="sourceLineNo">758</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.758"></a>
-<span class="sourceLineNo">759</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.759"></a>
-<span class="sourceLineNo">760</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      } catch (Exception e) {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>        if (e instanceof RemoteException) {<a name="line.762"></a>
-<span class="sourceLineNo">763</span>          throw (RemoteException) e;<a name="line.763"></a>
-<span class="sourceLineNo">764</span>        } else {<a name="line.764"></a>
-<span class="sourceLineNo">765</span>          throw new NameNodeException(e);<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        }<a name="line.766"></a>
-<span class="sourceLineNo">767</span>      }<a name="line.767"></a>
-<span class="sourceLineNo">768</span>      beginFileLease(client, stat.getFileId());<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      boolean succ = false;<a name="line.769"></a>
-<span class="sourceLineNo">770</span>      LocatedBlock locatedBlock = null;<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.771"></a>
-<span class="sourceLineNo">772</span>      try {<a name="line.772"></a>
-<span class="sourceLineNo">773</span>        DataChecksum summer = createChecksum(client);<a name="line.773"></a>
-<span class="sourceLineNo">774</span>        locatedBlock = BLOCK_ADDER.addBlock(namenode, src, client.getClientName(), null,<a name="line.774"></a>
-<span class="sourceLineNo">775</span>          excludesNodes, stat.getFileId(), null);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.776"></a>
-<span class="sourceLineNo">777</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.777"></a>
-<span class="sourceLineNo">778</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.778"></a>
-<span class="sourceLineNo">779</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.779"></a>
-<span class="sourceLineNo">780</span>          try {<a name="line.780"></a>
-<span class="sourceLineNo">781</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.781"></a>
-<span class="sourceLineNo">782</span>          } catch (Exception e) {<a name="line.782"></a>
-<span class="sourceLineNo">783</span>            // exclude the broken DN next time<a name="line.783"></a>
-<span class="sourceLineNo">784</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.784"></a>
-<span class="sourceLineNo">785</span>            throw e;<a name="line.785"></a>
-<span class="sourceLineNo">786</span>          }<a name="line.786"></a>
-<span class="sourceLineNo">787</span>        }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.788"></a>
-<span class="sourceLineNo">789</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.789"></a>
-<span class="sourceLineNo">790</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.790"></a>
-<span class="sourceLineNo">791</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.791"></a>
-<span class="sourceLineNo">792</span>        succ = true;<a name="line.792"></a>
-<span class="sourceLineNo">793</span>        return output;<a name="line.793"></a>
-<span class="sourceLineNo">794</span>      } catch (RemoteException e) {<a name="line.794"></a>
-<span class="sourceLineNo">795</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.795"></a>
-<span class="sourceLineNo">796</span>        if (shouldRetryCreate(e)) {<a name="line.796"></a>
-<span class="sourceLineNo">797</span>          if (retry &gt;= createMaxRetries) {<a name="line.797"></a>
-<span class="sourceLineNo">798</span>            throw e.unwrapRemoteException();<a name="line.798"></a>
-<span class="sourceLineNo">799</span>          }<a name="line.799"></a>
-<span class="sourceLineNo">800</span>        } else {<a name="line.800"></a>
-<span class="sourceLineNo">801</span>          throw e.unwrapRemoteException();<a name="line.801"></a>
-<span class="sourceLineNo">802</span>        }<a name="line.802"></a>
-<span class="sourceLineNo">803</span>      } catch (IOException e) {<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.804"></a>
-<span class="sourceLineNo">805</span>        if (retry &gt;= createMaxRetries) {<a name="line.805"></a>
-<span class="sourceLineNo">806</span>          throw e;<a name="line.806"></a>
-<span class="sourceLineNo">807</span>        }<a name="line.807"></a>
-<span class="sourceLineNo">808</span>        // overwrite the old broken file.<a name="line.808"></a>
-<span class="sourceLineNo">809</span>        overwrite = true;<a name="line.809"></a>
-<span class="sourceLineNo">810</span>        try {<a name="line.810"></a>
-<span class="sourceLineNo">811</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.811"></a>
-<span class="sourceLineNo">812</span>        } catch (InterruptedException ie) {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>          throw new InterruptedIOException();<a name="line.813"></a>
-<span class="sourceLineNo">814</span>        }<a name="line.814"></a>
-<span class="sourceLineNo">815</span>      } finally {<a name="line.815"></a>
-<span class="sourceLineNo">816</span>        if (!succ) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>          if (futureList != null) {<a name="line.817"></a>
-<span class="sourceLineNo">818</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.818"></a>
-<span class="sourceLineNo">819</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.819"></a>
-<span class="sourceLineNo">820</span><a name="line.820"></a>
-<span class="sourceLineNo">821</span>                @Override<a name="line.821"></a>
-<span class="sourceLineNo">822</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>                  if (future.isSuccess()) {<a name="line.823"></a>
-<span class="sourceLineNo">824</span>                    future.getNow().close();<a name="line.824"></a>
-<span class="sourceLineNo">825</span>                  }<a name="line.825"></a>
-<span class="sourceLineNo">826</span>                }<a name="line.826"></a>
-<span class="sourceLineNo">827</span>              });<a name="line.827"></a>
-<span class="sourceLineNo">828</span>            }<a name="line.828"></a>
-<span class="sourceLineNo">829</span>          }<a name="line.829"></a>
-<span class="sourceLineNo">830</span>          endFileLease(client, stat.getFileId());<a name="line.830"></a>
-<span class="sourceLineNo">831</span>        }<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      }<a name="line.832"></a>
-<span class="sourceLineNo">833</span>    }<a name="line.833"></a>
-<span class="sourceLineNo">834</span>  }<a name="line.834"></a>
-<span class="sourceLineNo">835</span><a name="line.835"></a>
-<span class="sourceLineNo">836</span>  /**<a name="line.836"></a>
-<span class="sourceLineNo">837</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.837"></a>
-<span class="sourceLineNo">838</span>   * inside an {@link EventLoop}.<a name="line.838"></a>
-<span class="sourceLineNo">839</span>   */<a name="line.839"></a>
-<span class="sourceLineNo">840</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.840"></a>
-<span class="sourceLineNo">841</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.841"></a>
-<span class="sourceLineNo">842</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.842"></a>
-<span class="sourceLineNo">843</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.843"></a>
-<span class="sourceLineNo">844</span><a name="line.844"></a>
-<span class="sourceLineNo">845</span>      @Override<a name="line.845"></a>
-<span class="sourceLineNo">846</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.846"></a>
-<span class="sourceLineNo">847</span>          throws IOException, UnresolvedLinkException {<a name="line.847"></a>
-<span class="sourceLineNo">848</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          blockSize, eventLoopGroup, channelClass);<a name="line.849"></a>
-<span class="sourceLineNo">850</span>      }<a name="line.850"></a>
-<span class="sourceLineNo">851</span><a name="line.851"></a>
-<span class="sourceLineNo">852</span>      @Override<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.853"></a>
-<span class="sourceLineNo">854</span>        throw new UnsupportedOperationException();<a name="line.854"></a>
-<span class="sourceLineNo">855</span>      }<a name="line.855"></a>
-<span class="sourceLineNo">856</span>    }.resolve(dfs, f);<a name="line.856"></a>
-<span class="sourceLineNo">857</span>  }<a name="line.857"></a>
-<span class="sourceLineNo">858</span><a name="line.858"></a>
-<span class="sourceLineNo">859</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.859"></a>
-<span class="sourceLineNo">860</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.861"></a>
-<span class="sourceLineNo">862</span>    // DFSOutputStream.newStreamForCreate.<a name="line.862"></a>
-<span class="sourceLineNo">863</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.863"></a>
-<span class="sourceLineNo">864</span>  }<a name="line.864"></a>
-<span class="sourceLineNo">865</span><a name="line.865"></a>
-<span class="sourceLineNo">866</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.866"></a>
-<span class="sourceLineNo">867</span>      ExtendedBlock block, long fileId) {<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    for (int retry = 0;; retry++) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      try {<a name="line.869"></a>
-<span class="sourceLineNo">870</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.870"></a>
-<span class="sourceLineNo">871</span>          endFileLease(client, fileId);<a name="line.871"></a>
-<span class="sourceLineNo">872</span>          return;<a name="line.872"></a>
-<span class="sourceLineNo">873</span>        } else {<a name="line.873"></a>
-<span class="sourceLineNo">874</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.874"></a>
-<span class="sourceLineNo">875</span>        }<a name="line.875"></a>
-<span class="sourceLineNo">876</span>      } catch (RemoteException e) {<a name="line.876"></a>
-<span class="sourceLineNo">877</span>        IOException ioe = e.unwrapRemoteException();<a name="line.877"></a>
-<span class="sourceLineNo">878</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.879"></a>
-<span class="sourceLineNo">880</span>          return;<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        } else {<a name="line.881"></a>
-<span class="sourceLineNo">882</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.882"></a>
-<span class="sourceLineNo">883</span>        }<a name="line.883"></a>
-<span class="sourceLineNo">884</span>      } catch (Exception e) {<a name="line.884"></a>
-<span class="sourceLineNo">885</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.885"></a>
-<span class="sourceLineNo">886</span>      }<a name="line.886"></a>
-<span class="sourceLineNo">887</span>      sleepIgnoreInterrupt(retry);<a name="line.887"></a>
-<span class="sourceLineNo">888</span>    }<a name="line.888"></a>
-<span class="sourceLineNo">889</span>  }<a name="line.889"></a>
-<span class="sourceLineNo">890</span><a name="line.890"></a>
-<span class="sourceLineNo">891</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.891"></a>
-<span class="sourceLineNo">892</span>    try {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.893"></a>
-<span class="sourceLineNo">894</span>    } catch (InterruptedException e) {<a name="line.894"></a>
-<span class="sourceLineNo">895</span>    }<a name="line.895"></a>
-<span class="sourceLineNo">896</span>  }<a name="line.896"></a>
-<span class="sourceLineNo">897</span>}<a name="line.897"></a>
-</pre>
-</div>
-</body>
-</html>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
index eb6a26e..6684af5 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
@@ -29,9 +29,9 @@
 <span class="sourceLineNo">021</span>import static org.apache.hadoop.fs.CreateFlag.OVERWRITE;<a name="line.21"></a>
 <span class="sourceLineNo">022</span>import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.createEncryptor;<a name="line.22"></a>
 <span class="sourceLineNo">023</span>import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.trySaslNegotiate;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;<a name="line.26"></a>
+<span class="sourceLineNo">024</span>import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import static org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage.PIPELINE_SETUP_CREATE;<a name="line.27"></a>
 <span class="sourceLineNo">028</span>import static org.apache.hbase.thirdparty.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;<a name="line.28"></a>
 <span class="sourceLineNo">029</span>import static org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE;<a name="line.29"></a>
@@ -53,856 +53,584 @@
 <span class="sourceLineNo">045</span>import org.apache.hadoop.fs.FileSystem;<a name="line.45"></a>
 <span class="sourceLineNo">046</span>import org.apache.hadoop.fs.FileSystemLinkResolver;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import org.apache.hadoop.fs.Path;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.fs.UnresolvedLinkException;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hdfs.DFSOutputStream;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hdfs.protocol.ClientProtocol;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hdfs.protocol.DatanodeInfo;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hdfs.protocol.ExtendedBlock;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hdfs.protocol.LocatedBlock;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hdfs.protocol.datatransfer.Op;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.io.EnumSetWritable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.net.NetUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;<a name="line.82"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.fs.StorageType;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.fs.UnresolvedLinkException;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.fs.permission.FsPermission;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hdfs.DFSClient;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hdfs.DFSOutputStream;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hdfs.DistributedFileSystem;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hdfs.protocol.ClientProtocol;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hdfs.protocol.DatanodeInfo;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hdfs.protocol.ExtendedBlock;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hdfs.protocol.LocatedBlock;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtoUtil;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hdfs.protocol.datatransfer.Op;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck.ECN;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BlockOpResponseProto;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ChecksumProto;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.io.EnumSetWritable;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.net.NetUtils;<a name="line.82"></a>
 <span class="sourceLineNo">083</span>import org.apache.hadoop.security.token.Token;<a name="line.83"></a>
 <span class="sourceLineNo">084</span>import org.apache.hadoop.util.DataChecksum;<a name="line.84"></a>
 <span class="sourceLineNo">085</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.85"></a>
 <span class="sourceLineNo">086</span>import org.slf4j.Logger;<a name="line.86"></a>
 <span class="sourceLineNo">087</span>import org.slf4j.LoggerFactory;<a name="line.87"></a>
 <span class="sourceLineNo">088</span><a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hbase.thirdparty.com.google.common.base.Throwables;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.io.netty.bootstrap.Bootstrap;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufAllocator;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufOutputStream;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.io.netty.buffer.PooledByteBufAllocator;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.io.netty.channel.Channel;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelFuture;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelFutureListener;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandler;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelInitializer;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelPipeline;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hbase.thirdparty.io.netty.channel.EventLoop;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufDecoder;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateEvent;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateHandler;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.Future;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.FutureListener;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise;<a name="line.112"></a>
-<span class="sourceLineNo">113</span><a name="line.113"></a>
-<span class="sourceLineNo">114</span>/**<a name="line.114"></a>
-<span class="sourceLineNo">115</span> * Helper class for implementing {@link FanOutOneBlockAsyncDFSOutput}.<a name="line.115"></a>
-<span class="sourceLineNo">116</span> */<a name="line.116"></a>
-<span class="sourceLineNo">117</span>@InterfaceAudience.Private<a name="line.117"></a>
-<span class="sourceLineNo">118</span>public final class FanOutOneBlockAsyncDFSOutputHelper {<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  private static final Logger LOG =<a name="line.119"></a>
-<span class="sourceLineNo">120</span>      LoggerFactory.getLogger(FanOutOneBlockAsyncDFSOutputHelper.class);<a name="line.120"></a>
-<span class="sourceLineNo">121</span><a name="line.121"></a>
-<span class="sourceLineNo">122</span>  private FanOutOneBlockAsyncDFSOutputHelper() {<a name="line.122"></a>
-<span class="sourceLineNo">123</span>  }<a name="line.123"></a>
+<span class="sourceLineNo">089</span>import org.apache.hbase.thirdparty.io.netty.bootstrap.Bootstrap;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufAllocator;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufOutputStream;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.io.netty.buffer.PooledByteBufAllocator;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.io.netty.channel.Channel;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelFuture;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelFutureListener;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandler;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelInitializer;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hbase.thirdparty.io.netty.channel.ChannelPipeline;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hbase.thirdparty.io.netty.channel.EventLoop;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufDecoder;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateEvent;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateHandler;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.Future;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.FutureListener;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise;<a name="line.110"></a>
+<span class="sourceLineNo">111</span><a name="line.111"></a>
+<span class="sourceLineNo">112</span>/**<a name="line.112"></a>
+<span class="sourceLineNo">113</span> * Helper class for implementing {@link FanOutOneBlockAsyncDFSOutput}.<a name="line.113"></a>
+<span class="sourceLineNo">114</span> */<a name="line.114"></a>
+<span class="sourceLineNo">115</span>@InterfaceAudience.Private<a name="line.115"></a>
+<span class="sourceLineNo">116</span>public final class FanOutOneBlockAsyncDFSOutputHelper {<a name="line.116"></a>
+<span class="sourceLineNo">117</span>  private static final Logger LOG =<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      LoggerFactory.getLogger(FanOutOneBlockAsyncDFSOutputHelper.class);<a name="line.118"></a>
+<span class="sourceLineNo">119</span><a name="line.119"></a>
+<span class="sourceLineNo">120</span>  private FanOutOneBlockAsyncDFSOutputHelper() {<a name="line.120"></a>
+<span class="sourceLineNo">121</span>  }<a name="line.121"></a>
+<span class="sourceLineNo">122</span><a name="line.122"></a>
+<span class="sourceLineNo">123</span>  public static final String ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES = "hbase.fs.async.create.retries";<a name="line.123"></a>
 <span class="sourceLineNo">124</span><a name="line.124"></a>
-<span class="sourceLineNo">125</span>  public static final String ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES = "hbase.fs.async.create.retries";<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  public static final int DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES = 10;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  // use pooled allocator for performance.<a name="line.128"></a>
-<span class="sourceLineNo">129</span>  private static final ByteBufAllocator ALLOC = PooledByteBufAllocator.DEFAULT;<a name="line.129"></a>
-<span class="sourceLineNo">130</span><a name="line.130"></a>
-<span class="sourceLineNo">131</span>  // copied from DFSPacket since it is package private.<a name="line.131"></a>
-<span class="sourceLineNo">132</span>  public static final long HEART_BEAT_SEQNO = -1L;<a name="line.132"></a>
-<span class="sourceLineNo">133</span><a name="line.133"></a>
-<span class="sourceLineNo">134</span>  // Timeouts for communicating with DataNode for streaming writes/reads<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  public static final int READ_TIMEOUT = 60 * 1000;<a name="line.135"></a>
+<span class="sourceLineNo">125</span>  public static final int DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES = 10;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  // use pooled allocator for performance.<a name="line.126"></a>
+<span class="sourceLineNo">127</span>  private static final ByteBufAllocator ALLOC = PooledByteBufAllocator.DEFAULT;<a name="line.127"></a>
+<span class="sourceLineNo">128</span><a name="line.128"></a>
+<span class="sourceLineNo">129</span>  // copied from DFSPacket since it is package private.<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  public static final long HEART_BEAT_SEQNO = -1L;<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  // Timeouts for communicating with DataNode for streaming writes/reads<a name="line.132"></a>
+<span class="sourceLineNo">133</span>  public static final int READ_TIMEOUT = 60 * 1000;<a name="line.133"></a>
+<span class="sourceLineNo">134</span><a name="line.134"></a>
+<span class="sourceLineNo">135</span>  private static final DatanodeInfo[] EMPTY_DN_ARRAY = new DatanodeInfo[0];<a name="line.135"></a>
 <span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>  private static final DatanodeInfo[] EMPTY_DN_ARRAY = new DatanodeInfo[0];<a name="line.137"></a>
+<span class="sourceLineNo">137</span>  private interface LeaseManager {<a name="line.137"></a>
 <span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>  // helper class for getting Status from PipelineAckProto. In hadoop 2.6 or before, there is a<a name="line.139"></a>
-<span class="sourceLineNo">140</span>  // getStatus method, and for hadoop 2.7 or after, the status is retrieved from flag. The flag may<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  // get from proto directly, or combined by the reply field of the proto and a ECN object. See<a name="line.141"></a>
-<span class="sourceLineNo">142</span>  // createPipelineAckStatusGetter for more details.<a name="line.142"></a>
-<span class="sourceLineNo">143</span>  private interface PipelineAckStatusGetter {<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    Status get(PipelineAckProto ack);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  private static final PipelineAckStatusGetter PIPELINE_ACK_STATUS_GETTER;<a name="line.147"></a>
-<span class="sourceLineNo">148</span><a name="line.148"></a>
-<span class="sourceLineNo">149</span>  // StorageType enum is placed under o.a.h.hdfs in hadoop 2.6 and o.a.h.fs in hadoop 2.7. So here<a name="line.149"></a>
-<span class="sourceLineNo">150</span>  // we need to use reflection to set it.See createStorageTypeSetter for more details.<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  private interface StorageTypeSetter {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    OpWriteBlockProto.Builder set(OpWriteBlockProto.Builder builder, Enum&lt;?&gt; storageType);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
+<span class="sourceLineNo">139</span>    void begin(DFSClient client, long inodeId);<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>    void end(DFSClient client, long inodeId);<a name="line.141"></a>
+<span class="sourceLineNo">142</span>  }<a name="line.142"></a>
+<span class="sourceLineNo">143</span><a name="line.143"></a>
+<span class="sourceLineNo">144</span>  private static final LeaseManager LEASE_MANAGER;<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span>  // This is used to terminate a recoverFileLease call when FileSystem is already closed.<a name="line.146"></a>
+<span class="sourceLineNo">147</span>  // isClientRunning is not public so we need to use reflection.<a name="line.147"></a>
+<span class="sourceLineNo">148</span>  private interface DFSClientAdaptor {<a name="line.148"></a>
+<span class="sourceLineNo">149</span><a name="line.149"></a>
+<span class="sourceLineNo">150</span>    boolean isClientRunning(DFSClient client);<a name="line.150"></a>
+<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
+<span class="sourceLineNo">152</span><a name="line.152"></a>
+<span class="sourceLineNo">153</span>  private static final DFSClientAdaptor DFS_CLIENT_ADAPTOR;<a name="line.153"></a>
 <span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>  private static final StorageTypeSetter STORAGE_TYPE_SETTER;<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>  // helper class for calling add block method on namenode. There is a addBlockFlags parameter for<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  // hadoop 2.8 or later. See createBlockAdder for more details.<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  private interface BlockAdder {<a name="line.159"></a>
-<span class="sourceLineNo">160</span><a name="line.160"></a>
-<span class="sourceLineNo">161</span>    LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,<a name="line.161"></a>
-<span class="sourceLineNo">162</span>        ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId, String[] favoredNodes)<a name="line.162"></a>
-<span class="sourceLineNo">163</span>        throws IOException;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  }<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>  private static final BlockAdder BLOCK_ADDER;<a name="line.166"></a>
-<span class="sourceLineNo">167</span><a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private interface LeaseManager {<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>    void begin(DFSClient client, long inodeId);<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>    void end(DFSClient client, long inodeId);<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>  private static final LeaseManager LEASE_MANAGER;<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>  // This is used to terminate a recoverFileLease call when FileSystem is already closed.<a name="line.177"></a>
-<span class="sourceLineNo">178</span>  // isClientRunning is not public so we need to use reflection.<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  private interface DFSClientAdaptor {<a name="line.179"></a>
-<span class="sourceLineNo">180</span><a name="line.180"></a>
-<span class="sourceLineNo">181</span>    boolean isClientRunning(DFSClient client);<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  }<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>  private static final DFSClientAdaptor DFS_CLIENT_ADAPTOR;<a name="line.184"></a>
-<span class="sourceLineNo">185</span><a name="line.185"></a>
-<span class="sourceLineNo">186</span>  // helper class for convert protos.<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  private interface PBHelper {<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>    ExtendedBlockProto convert(ExtendedBlock b);<a name="line.189"></a>
-<span class="sourceLineNo">190</span><a name="line.190"></a>
-<span class="sourceLineNo">191</span>    TokenProto convert(Token&lt;?&gt; tok);<a name="line.191"></a>
-<span class="sourceLineNo">192</span>  }<a name="line.192"></a>
-<span class="sourceLineNo">193</span><a name="line.193"></a>
-<span class="sourceLineNo">194</span>  private static final PBHelper PB_HELPER;<a name="line.194"></a>
+<span class="sourceLineNo">155</span>  // helper class for creating files.<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  private interface FileCreator {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    default HdfsFileStatus create(ClientProtocol instance, String src, FsPermission masked,<a name="line.157"></a>
+<span class="sourceLineNo">158</span>        String clientName, EnumSetWritable&lt;CreateFlag&gt; flag, boolean createParent,<a name="line.158"></a>
+<span class="sourceLineNo">159</span>        short replication, long blockSize, CryptoProtocolVersion[] supportedVersions)<a name="line.159"></a>
+<span class="sourceLineNo">160</span>        throws Exception {<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      try {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>        return (HdfsFileStatus) createObject(instance, src, masked, clientName, flag, createParent,<a name="line.162"></a>
+<span class="sourceLineNo">163</span>          replication, blockSize, supportedVersions);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      } catch (InvocationTargetException e) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>        if (e.getCause() instanceof Exception) {<a name="line.165"></a>
+<span class="sourceLineNo">166</span>          throw (Exception) e.getCause();<a name="line.166"></a>
+<span class="sourceLineNo">167</span>        } else {<a name="line.167"></a>
+<span class="sourceLineNo">168</span>          throw new RuntimeException(e.getCause());<a name="line.168"></a>
+<span class="sourceLineNo">169</span>        }<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      }<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    }<a name="line.171"></a>
+<span class="sourceLineNo">172</span><a name="line.172"></a>
+<span class="sourceLineNo">173</span>    Object createObject(ClientProtocol instance, String src, FsPermission masked, String clientName,<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        EnumSetWritable&lt;CreateFlag&gt; flag, boolean createParent, short replication, long blockSize,<a name="line.174"></a>
+<span class="sourceLineNo">175</span>        CryptoProtocolVersion[] supportedVersions) throws Exception;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  }<a name="line.176"></a>
+<span class="sourceLineNo">177</span><a name="line.177"></a>
+<span class="sourceLineNo">178</span>  private static final FileCreator FILE_CREATOR;<a name="line.178"></a>
+<span class="sourceLineNo">179</span><a name="line.179"></a>
+<span class="sourceLineNo">180</span>  private static DFSClientAdaptor createDFSClientAdaptor() throws NoSuchMethodException {<a name="line.180"></a>
+<span class="sourceLineNo">181</span>    Method isClientRunningMethod = DFSClient.class.getDeclaredMethod("isClientRunning");<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    isClientRunningMethod.setAccessible(true);<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    return new DFSClientAdaptor() {<a name="line.183"></a>
+<span class="sourceLineNo">184</span><a name="line.184"></a>
+<span class="sourceLineNo">185</span>      @Override<a name="line.185"></a>
+<span class="sourceLineNo">186</span>      public boolean isClientRunning(DFSClient client) {<a name="line.186"></a>
+<span class="sourceLineNo">187</span>        try {<a name="line.187"></a>
+<span class="sourceLineNo">188</span>          return (Boolean) isClientRunningMethod.invoke(client);<a name="line.188"></a>
+<span class="sourceLineNo">189</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.189"></a>
+<span class="sourceLineNo">190</span>          throw new RuntimeException(e);<a name="line.190"></a>
+<span class="sourceLineNo">191</span>        }<a name="line.191"></a>
+<span class="sourceLineNo">192</span>      }<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    };<a name="line.193"></a>
+<span class="sourceLineNo">194</span>  }<a name="line.194"></a>
 <span class="sourceLineNo">195</span><a name="line.195"></a>
-<span class="sourceLineNo">196</span>  // helper class for creating data checksum.<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  private interface ChecksumCreater {<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    DataChecksum createChecksum(DFSClient client);<a name="line.198"></a>
-<span class="sourceLineNo">199</span>  }<a name="line.199"></a>
-<span class="sourceLineNo">200</span><a name="line.200"></a>
-<span class="sourceLineNo">201</span>  private static final ChecksumCreater CHECKSUM_CREATER;<a name="line.201"></a>
-<span class="sourceLineNo">202</span><a name="line.202"></a>
-<span class="sourceLineNo">203</span>  // helper class for creating files.<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  private interface FileCreator {<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    default HdfsFileStatus create(ClientProtocol instance, String src, FsPermission masked,<a name="line.205"></a>
-<span class="sourceLineNo">206</span>        String clientName, EnumSetWritable&lt;CreateFlag&gt; flag, boolean createParent,<a name="line.206"></a>
-<span class="sourceLineNo">207</span>        short replication, long blockSize, CryptoProtocolVersion[] supportedVersions)<a name="line.207"></a>
-<span class="sourceLineNo">208</span>        throws Exception {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      try {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        return (HdfsFileStatus) createObject(instance, src, masked, clientName, flag, createParent,<a name="line.210"></a>
-<span class="sourceLineNo">211</span>          replication, blockSize, supportedVersions);<a name="line.211"></a>
-<span class="sourceLineNo">212</span>      } catch (InvocationTargetException e) {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>        if (e.getCause() instanceof Exception) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>          throw (Exception) e.getCause();<a name="line.214"></a>
-<span class="sourceLineNo">215</span>        } else {<a name="line.215"></a>
-<span class="sourceLineNo">216</span>          throw new RuntimeException(e.getCause());<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        }<a name="line.217"></a>
-<span class="sourceLineNo">218</span>      }<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    }<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>    Object createObject(ClientProtocol instance, String src, FsPermission masked, String clientName,<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        EnumSetWritable&lt;CreateFlag&gt; flag, boolean createParent, short replication, long blockSize,<a name="line.222"></a>
-<span class="sourceLineNo">223</span>        CryptoProtocolVersion[] supportedVersions) throws Exception;<a name="line.223"></a>
-<span class="sourceLineNo">224</span>  }<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>  private static final FileCreator FILE_CREATOR;<a name="line.226"></a>
-<span class="sourceLineNo">227</span><a name="line.227"></a>
-<span class="sourceLineNo">228</span>  private static DFSClientAdaptor createDFSClientAdaptor() throws NoSuchMethodException {<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    Method isClientRunningMethod = DFSClient.class.getDeclaredMethod("isClientRunning");<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    isClientRunningMethod.setAccessible(true);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    return new DFSClientAdaptor() {<a name="line.231"></a>
-<span class="sourceLineNo">232</span><a name="line.232"></a>
-<span class="sourceLineNo">233</span>      @Override<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      public boolean isClientRunning(DFSClient client) {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        try {<a name="line.235"></a>
-<span class="sourceLineNo">236</span>          return (Boolean) isClientRunningMethod.invoke(client);<a name="line.236"></a>
-<span class="sourceLineNo">237</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.237"></a>
-<span class="sourceLineNo">238</span>          throw new RuntimeException(e);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>        }<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      }<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    };<a name="line.241"></a>
-<span class="sourceLineNo">242</span>  }<a name="line.242"></a>
-<span class="sourceLineNo">243</span><a name="line.243"></a>
-<span class="sourceLineNo">244</span>  private static LeaseManager createLeaseManager() throws NoSuchMethodException {<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    Method beginFileLeaseMethod =<a name="line.245"></a>
-<span class="sourceLineNo">246</span>        DFSClient.class.getDeclaredMethod("beginFileLease", long.class, DFSOutputStream.class);<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    beginFileLeaseMethod.setAccessible(true);<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    Method endFileLeaseMethod = DFSClient.class.getDeclaredMethod("endFileLease", long.class);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    endFileLeaseMethod.setAccessible(true);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    return new LeaseManager() {<a name="line.250"></a>
-<span class="sourceLineNo">251</span><a name="line.251"></a>
-<span class="sourceLineNo">252</span>      @Override<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      public void begin(DFSClient client, long inodeId) {<a name="line.253"></a>
-<span class="sourceLineNo">254</span>        try {<a name="line.254"></a>
-<span class="sourceLineNo">255</span>          beginFileLeaseMethod.invoke(client, inodeId, null);<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.256"></a>
-<span class="sourceLineNo">257</span>          throw new RuntimeException(e);<a name="line.257"></a>
-<span class="sourceLineNo">258</span>        }<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      }<a name="line.259"></a>
-<span class="sourceLineNo">260</span><a name="line.260"></a>
-<span class="sourceLineNo">261</span>      @Override<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      public void end(DFSClient client, long inodeId) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>        try {<a name="line.263"></a>
-<span class="sourceLineNo">264</span>          endFileLeaseMethod.invoke(client, inodeId);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>          throw new RuntimeException(e);<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        }<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      }<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    };<a name="line.269"></a>
+<span class="sourceLineNo">196</span>  private static LeaseManager createLeaseManager() throws NoSuchMethodException {<a name="line.196"></a>
+<span class="sourceLineNo">197</span>    Method beginFileLeaseMethod =<a name="line.197"></a>
+<span class="sourceLineNo">198</span>        DFSClient.class.getDeclaredMethod("beginFileLease", long.class, DFSOutputStream.class);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    beginFileLeaseMethod.setAccessible(true);<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    Method endFileLeaseMethod = DFSClient.class.getDeclaredMethod("endFileLease", long.class);<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    endFileLeaseMethod.setAccessible(true);<a name="line.201"></a>
+<span class="sourceLineNo">202</span>    return new LeaseManager() {<a name="line.202"></a>
+<span class="sourceLineNo">203</span><a name="line.203"></a>
+<span class="sourceLineNo">204</span>      @Override<a name="line.204"></a>
+<span class="sourceLineNo">205</span>      public void begin(DFSClient client, long inodeId) {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>        try {<a name="line.206"></a>
+<span class="sourceLineNo">207</span>          beginFileLeaseMethod.invoke(client, inodeId, null);<a name="line.207"></a>
+<span class="sourceLineNo">208</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>          throw new RuntimeException(e);<a name="line.209"></a>
+<span class="sourceLineNo">210</span>        }<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      }<a name="line.211"></a>
+<span class="sourceLineNo">212</span><a name="line.212"></a>
+<span class="sourceLineNo">213</span>      @Override<a name="line.213"></a>
+<span class="sourceLineNo">214</span>      public void end(DFSClient client, long inodeId) {<a name="line.214"></a>
+<span class="sourceLineNo">215</span>        try {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>          endFileLeaseMethod.invoke(client, inodeId);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.217"></a>
+<span class="sourceLineNo">218</span>          throw new RuntimeException(e);<a name="line.218"></a>
+<span class="sourceLineNo">219</span>        }<a name="line.219"></a>
+<span class="sourceLineNo">220</span>      }<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    };<a name="line.221"></a>
+<span class="sourceLineNo">222</span>  }<a name="line.222"></a>
+<span class="sourceLineNo">223</span><a name="line.223"></a>
+<span class="sourceLineNo">224</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.225"></a>
+<span class="sourceLineNo">226</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>      CryptoProtocolVersion[].class, String.class);<a name="line.227"></a>
+<span class="sourceLineNo">228</span><a name="line.228"></a>
+<span class="sourceLineNo">229</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.229"></a>
+<span class="sourceLineNo">230</span>        supportedVersions) -&gt; {<a name="line.230"></a>
+<span class="sourceLineNo">231</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.231"></a>
+<span class="sourceLineNo">232</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    };<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  }<a name="line.234"></a>
+<span class="sourceLineNo">235</span><a name="line.235"></a>
+<span class="sourceLineNo">236</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.237"></a>
+<span class="sourceLineNo">238</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      CryptoProtocolVersion[].class);<a name="line.239"></a>
+<span class="sourceLineNo">240</span><a name="line.240"></a>
+<span class="sourceLineNo">241</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.241"></a>
+<span class="sourceLineNo">242</span>        supportedVersions) -&gt; {<a name="line.242"></a>
+<span class="sourceLineNo">243</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.243"></a>
+<span class="sourceLineNo">244</span>        createParent, replication, blockSize, supportedVersions);<a name="line.244"></a>
+<span class="sourceLineNo">245</span>    };<a name="line.245"></a>
+<span class="sourceLineNo">246</span>  }<a name="line.246"></a>
+<span class="sourceLineNo">247</span><a name="line.247"></a>
+<span class="sourceLineNo">248</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    try {<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      return createFileCreator3();<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    } catch (NoSuchMethodException e) {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    }<a name="line.253"></a>
+<span class="sourceLineNo">254</span>    return createFileCreator2();<a name="line.254"></a>
+<span class="sourceLineNo">255</span>  }<a name="line.255"></a>
+<span class="sourceLineNo">256</span><a name="line.256"></a>
+<span class="sourceLineNo">257</span>  // cancel the processing if DFSClient is already closed.<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.258"></a>
+<span class="sourceLineNo">259</span><a name="line.259"></a>
+<span class="sourceLineNo">260</span>    private final DFSClient client;<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>    public CancelOnClose(DFSClient client) {<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      this.client = client;<a name="line.263"></a>
+<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
+<span class="sourceLineNo">265</span><a name="line.265"></a>
+<span class="sourceLineNo">266</span>    @Override<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    public boolean progress() {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
 <span class="sourceLineNo">270</span>  }<a name="line.270"></a>
 <span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  private static PipelineAckStatusGetter createPipelineAckStatusGetter27()<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      throws NoSuchMethodException {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    Method getFlagListMethod = PipelineAckProto.class.getMethod("getFlagList");<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    @SuppressWarnings("rawtypes")<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    Class&lt;? extends Enum&gt; ecnClass;<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    try {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      ecnClass = Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck$ECN")<a name="line.278"></a>
-<span class="sourceLineNo">279</span>          .asSubclass(Enum.class);<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    } catch (ClassNotFoundException e) {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      String msg = "Couldn't properly initialize the PipelineAck.ECN class. Please " +<a name="line.281"></a>
-<span class="sourceLineNo">282</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.282"></a>
-<span class="sourceLineNo">283</span>          "HBASE-16110 for more information.";<a name="line.283"></a>
-<span class="sourceLineNo">284</span>      LOG.error(msg, e);<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      throw new Error(msg, e);<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    @SuppressWarnings("unchecked")<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    Enum&lt;?&gt; disabledECN = Enum.valueOf(ecnClass, "DISABLED");<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    Method getReplyMethod = PipelineAckProto.class.getMethod("getReply", int.class);<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    Method combineHeaderMethod =<a name="line.290"></a>
-<span class="sourceLineNo">291</span>        PipelineAck.class.getMethod("combineHeader", ecnClass, Status.class);<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    Method getStatusFromHeaderMethod =<a name="line.292"></a>
-<span class="sourceLineNo">293</span>        PipelineAck.class.getMethod("getStatusFromHeader", int.class);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    return new PipelineAckStatusGetter() {<a name="line.294"></a>
-<span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>      @Override<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      public Status get(PipelineAckProto ack) {<a name="line.297"></a>
-<span class="sourceLineNo">298</span>        try {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>          @SuppressWarnings("unchecked")<a name="line.299"></a>
-<span class="sourceLineNo">300</span>          List&lt;Integer&gt; flagList = (List&lt;Integer&gt;) getFlagListMethod.invoke(ack);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>          Integer headerFlag;<a name="line.301"></a>
-<span class="sourceLineNo">302</span>          if (flagList.isEmpty()) {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>            Status reply = (Status) getReplyMethod.invoke(ack, 0);<a name="line.303"></a>
-<span class="sourceLineNo">304</span>            headerFlag = (Integer) combineHeaderMethod.invoke(null, disabledECN, reply);<a name="line.304"></a>
-<span class="sourceLineNo">305</span>          } else {<a name="line.305"></a>
-<span class="sourceLineNo">306</span>            headerFlag = flagList.get(0);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>          }<a name="line.307"></a>
-<span class="sourceLineNo">308</span>          return (Status) getStatusFromHeaderMethod.invoke(null, headerFlag);<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          throw new RuntimeException(e);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>        }<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      }<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    };<a name="line.313"></a>
-<span class="sourceLineNo">314</span>  }<a name="line.314"></a>
-<span class="sourceLineNo">315</span><a name="line.315"></a>
-<span class="sourceLineNo">316</span>  private static PipelineAckStatusGetter createPipelineAckStatusGetter26()<a name="line.316"></a>
-<span class="sourceLineNo">317</span>      throws NoSuchMethodException {<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    Method getStatusMethod = PipelineAckProto.class.getMethod("getStatus", int.class);<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    return new PipelineAckStatusGetter() {<a name="line.319"></a>
-<span class="sourceLineNo">320</span><a name="line.320"></a>
-<span class="sourceLineNo">321</span>      @Override<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      public Status get(PipelineAckProto ack) {<a name="line.322"></a>
-<span class="sourceLineNo">323</span>        try {<a name="line.323"></a>
-<span class="sourceLineNo">324</span>          return (Status) getStatusMethod.invoke(ack, 0);<a name="line.324"></a>
-<span class="sourceLineNo">325</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>          throw new RuntimeException(e);<a name="line.326"></a>
-<span class="sourceLineNo">327</span>        }<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      }<a name="line.328"></a>
-<span class="sourceLineNo">329</span>    };<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  }<a name="line.330"></a>
-<span class="sourceLineNo">331</span><a name="line.331"></a>
-<span class="sourceLineNo">332</span>  private static PipelineAckStatusGetter createPipelineAckStatusGetter()<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      throws NoSuchMethodException {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    try {<a name="line.334"></a>
-<span class="sourceLineNo">335</span>      return createPipelineAckStatusGetter27();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    } catch (NoSuchMethodException e) {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>      LOG.debug("Can not get expected method " + e.getMessage() +<a name="line.337"></a>
-<span class="sourceLineNo">338</span>          ", this usually because your Hadoop is pre 2.7.0, " +<a name="line.338"></a>
-<span class="sourceLineNo">339</span>          "try the methods in Hadoop 2.6.x instead.");<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    }<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    return createPipelineAckStatusGetter26();<a name="line.341"></a>
-<span class="sourceLineNo">342</span>  }<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>  private static StorageTypeSetter createStorageTypeSetter() throws NoSuchMethodException {<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    Method setStorageTypeMethod =<a name="line.345"></a>
-<span class="sourceLineNo">346</span>        OpWriteBlockProto.Builder.class.getMethod("setStorageType", StorageTypeProto.class);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    ImmutableMap.Builder&lt;String, StorageTypeProto&gt; builder = ImmutableMap.builder();<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    for (StorageTypeProto storageTypeProto : StorageTypeProto.values()) {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      builder.put(storageTypeProto.name(), storageTypeProto);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    }<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    ImmutableMap&lt;String, StorageTypeProto&gt; name2ProtoEnum = builder.build();<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    return new StorageTypeSetter() {<a name="line.352"></a>
+<span class="sourceLineNo">272</span>  static {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    try {<a name="line.273"></a>
+<span class="sourceLineNo">274</span>      LEASE_MANAGER = createLeaseManager();<a name="line.274"></a>
+<span class="sourceLineNo">275</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.275"></a>
+<span class="sourceLineNo">276</span>      FILE_CREATOR = createFileCreator();<a name="line.276"></a>
+<span class="sourceLineNo">277</span>    } catch (Exception e) {<a name="line.277"></a>
+<span class="sourceLineNo">278</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.278"></a>
+<span class="sourceLineNo">279</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.279"></a>
+<span class="sourceLineNo">280</span>          "HBASE-16110 for more information.";<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      LOG.error(msg, e);<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      throw new Error(msg, e);<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.287"></a>
+<span class="sourceLineNo">288</span>  }<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  }<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    return client.getConf().createChecksum(null);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  }<a name="line.296"></a>
+<span class="sourceLineNo">297</span><a name="line.297"></a>
+<span class="sourceLineNo">298</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.298"></a>
+<span class="sourceLineNo">299</span>    List&lt;Integer&gt; flagList = ack.getFlagList();<a name="line.299"></a>
+<span class="sourceLineNo">300</span>    Integer headerFlag;<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    if (flagList.isEmpty()) {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      Status reply = ack.getReply(0);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      headerFlag = PipelineAck.combineHeader(ECN.DISABLED, reply);<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    } else {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      headerFlag = flagList.get(0);<a name="line.305"></a>
+<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
+<span class="sourceLineNo">307</span>    return PipelineAck.getStatusFromHeader(headerFlag);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>  }<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.310"></a>
+<span class="sourceLineNo">311</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.311"></a>
+<span class="sourceLineNo">312</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.312"></a>
+<span class="sourceLineNo">313</span>      new ProtobufVarint32FrameDecoder(),<a name="line.313"></a>
+<span class="sourceLineNo">314</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.314"></a>
+<span class="sourceLineNo">315</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.315"></a>
+<span class="sourceLineNo">316</span><a name="line.316"></a>
+<span class="sourceLineNo">317</span>        @Override<a name="line.317"></a>
+<span class="sourceLineNo">318</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.318"></a>
+<span class="sourceLineNo">319</span>            throws Exception {<a name="line.319"></a>
+<span class="sourceLineNo">320</span>          Status pipelineStatus = resp.getStatus();<a name="line.320"></a>
+<span class="sourceLineNo">321</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.321"></a>
+<span class="sourceLineNo">322</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.322"></a>
+<span class="sourceLineNo">323</span>          }<a name="line.323"></a>
+<span class="sourceLineNo">324</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.324"></a>
+<span class="sourceLineNo">325</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.325"></a>
+<span class="sourceLineNo">326</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.326"></a>
+<span class="sourceLineNo">327</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.327"></a>
+<span class="sourceLineNo">328</span>                  resp.getMessage() + ", " + logInfo);<a name="line.328"></a>
+<span class="sourceLineNo">329</span>            } else {<a name="line.329"></a>
+<span class="sourceLineNo">330</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.330"></a>
+<span class="sourceLineNo">331</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.331"></a>
+<span class="sourceLineNo">332</span>            }<a name="line.332"></a>
+<span class="sourceLineNo">333</span>          }<a name="line.333"></a>
+<span class="sourceLineNo">334</span>          // success<a name="line.334"></a>
+<span class="sourceLineNo">335</span>          ChannelPipeline p = ctx.pipeline();<a name="line.335"></a>
+<span class="sourceLineNo">336</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.336"></a>
+<span class="sourceLineNo">337</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.337"></a>
+<span class="sourceLineNo">338</span>            // of pipeline.<a name="line.338"></a>
+<span class="sourceLineNo">339</span>            if (handler instanceof IdleStateHandler) {<a name="line.339"></a>
+<span class="sourceLineNo">340</span>              break;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>            }<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          }<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.343"></a>
+<span class="sourceLineNo">344</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.344"></a>
+<span class="sourceLineNo">345</span>          ctx.channel().config().setAutoRead(false);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>          promise.trySuccess(ctx.channel());<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
+<span class="sourceLineNo">348</span><a name="line.348"></a>
+<span class="sourceLineNo">349</span>        @Override<a name="line.349"></a>
+<span class="sourceLineNo">350</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.350"></a>
+<span class="sourceLineNo">351</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.351"></a>
+<span class="sourceLineNo">352</span>        }<a name="line.352"></a>
 <span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>      @Override<a name="line.354"></a>
-<span class="sourceLineNo">355</span>      public OpWriteBlockProto.Builder set(OpWriteBlockProto.Builder builder, Enum&lt;?&gt; storageType) {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>        Object protoEnum = name2ProtoEnum.get(storageType.name());<a name="line.356"></a>
-<span class="sourceLineNo">357</span>        try {<a name="line.357"></a>
-<span class="sourceLineNo">358</span>          setStorageTypeMethod.invoke(builder, protoEnum);<a name="line.358"></a>
-<span class="sourceLineNo">359</span>        } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {<a name="line.359"></a>
-<span class="sourceLineNo">360</span>          throw new RuntimeException(e);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>        }<a name="line.361"></a>
-<span class="sourceLineNo">362</span>        return builder;<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      }<a name="line.363"></a>
-<span class="sourceLineNo">364</span>    };<a name="line.364"></a>
-<span class="sourceLineNo">365</span>  }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>  private static BlockAdder createBlockAdder() throws NoSuchMethodException {<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    for (Method method : ClientProtocol.class.getMethods()) {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      if (method.getName().equals("addBlock")) {<a name="line.369"></a>
-<span class="sourceLineNo">370</span>        Method addBlockMethod = method;<a name="line.370"></a>
-<span class="sourceLineNo">371</span>        Class&lt;?&gt;[] paramTypes = addBlockMethod.getParameterTypes();<a name="line.371"></a>
-<span class="sourceLineNo">372</span>        if (paramTypes[paramTypes.length - 1] == String[].class) {<a name="line.372"></a>
-<span class="sourceLineNo">373</span>          return new BlockAdder() {<a name="line.373"></a>
-<span class="sourceLineNo">374</span><a name="line.374"></a>
-<span class="sourceLineNo">375</span>            @Override<a name="line.375"></a>
-<span class="sourceLineNo">376</span>            public LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,<a name="line.376"></a>
-<span class="sourceLineNo">377</span>                ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId,<a name="line.377"></a>
-<span class="sourceLineNo">378</span>                String[] favoredNodes) throws IOException {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>              try {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>                return (LocatedBlock) addBlockMethod.invoke(namenode, src, clientName, previous,<a name="line.380"></a>
-<span class="sourceLineNo">381</span>                  excludeNodes, fileId, favoredNodes);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>              } catch (IllegalAccessException e) {<a name="line.382"></a>
-<span class="sourceLineNo">383</span>                throw new RuntimeException(e);<a name="line.383"></a>
-<span class="sourceLineNo">384</span>              } catch (InvocationTargetException e) {<a name="line.384"></a>
-<span class="sourceLineNo">385</span>                Throwables.propagateIfPossible(e.getTargetException(), IOException.class);<a name="line.385"></a>
-<span class="sourceLineNo">386</span>                throw new RuntimeException(e);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>              }<a name="line.387"></a>
-<span class="sourceLineNo">388</span>            }<a name="line.388"></a>
-<span class="sourceLineNo">389</span>          };<a name="line.389"></a>
-<span class="sourceLineNo">390</span>        } else {<a name="line.390"></a>
-<span class="sourceLineNo">391</span>          return new BlockAdder() {<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>            @Override<a name="line.393"></a>
-<span class="sourceLineNo">394</span>            public LocatedBlock addBlock(ClientProtocol namenode, String src, String clientName,<a name="line.394"></a>
-<span class="sourceLineNo">395</span>                ExtendedBlock previous, DatanodeInfo[] excludeNodes, long fileId,<a name="line.395"></a>
-<span class="sourceLineNo">396</span>                String[] favoredNodes) throws IOException {<a name="line.396"></a>
-<span class="sourceLineNo">397</span>              try {<a name="line.397"></a>
-<span class="sourceLineNo">398</span>                return (LocatedBlock) addBlockMethod.invoke(namenode, src, clientName, previous,<a name="line.398"></a>
-<span class="sourceLineNo">399</span>                  excludeNodes, fileId, favoredNodes, null);<a name="line.399"></a>
-<span class="sourceLineNo">400</span>              } catch (IllegalAccessException e) {<a name="line.400"></a>
-<span class="sourceLineNo">401</span>                throw new RuntimeException(e);<a name="line.401"></a>
-<span class="sourceLineNo">402</span>              } catch (InvocationTargetException e) {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>                Throwables.propagateIfPossible(e.getTargetException(), IOException.class);<a name="line.403"></a>
-<span class="sourceLineNo">404</span>                throw new RuntimeException(e);<a name="line.404"></a>
-<span class="sourceLineNo">405</span>              }<a name="line.405"></a>
-<span class="sourceLineNo">406</span>            }<a name="line.406"></a>
-<span class="sourceLineNo">407</span>          };<a name="line.407"></a>
-<span class="sourceLineNo">408</span>        }<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      }<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    }<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    throw new NoSuchMethodException("Can not find addBlock method in ClientProtocol");<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  private static PBHelper createPBHelper() throws NoSuchMethodException {<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    Class&lt;?&gt; helperClass;<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    String clazzName = "org.apache.hadoop.hdfs.protocolPB.PBHelperClient";<a name="line.416"></a>
-<span class="sourceLineNo">417</span>    try {<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      helperClass = Class.forName(clazzName);<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    } catch (ClassNotFoundException e) {<a name="line.419"></a>
-<span class="sourceLineNo">420</span>      helperClass = org.apache.hadoop.hdfs.protocolPB.PBHelper.class;<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      LOG.debug("" + clazzName + " not found (Hadoop is pre-2.8.0?); using " +<a name="line.421"></a>
-<span class="sourceLineNo">422</span>          helperClass.toString() + " instead.");<a name="line.422"></a>
-<span class="sourceLineNo">423</span>    }<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    Method convertEBMethod = helperClass.getMethod("convert", ExtendedBlock.class);<a name="line.424"></a>
-<span class="sourceLineNo">425</span>    Method convertTokenMethod = helperClass.getMethod("convert", Token.class);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    return new PBHelper() {<a name="line.426"></a>
-<span class="sourceLineNo">427</span><a name="line.427"></a>
-<span class="sourceLineNo">428</span>      @Override<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      public ExtendedBlockProto convert(ExtendedBlock b) {<a name="line.429"></a>
-<span class="sourceLineNo">430</span>        try {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>          return (ExtendedBlockProto) convertEBMethod.invoke(null, b);<a name="line.431"></a>
-<span class="sourceLineNo">432</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.432"></a>
-<span class="sourceLineNo">433</span>          throw new RuntimeException(e);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>        }<a name="line.434"></a>
-<span class="sourceLineNo">435</span>      }<a name="line.435"></a>
+<span class="sourceLineNo">354</span>        @Override<a name="line.354"></a>
+<span class="sourceLineNo">355</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.355"></a>
+<span class="sourceLineNo">356</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.356"></a>
+<span class="sourceLineNo">357</span>            promise<a name="line.357"></a>
+<span class="sourceLineNo">358</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.358"></a>
+<span class="sourceLineNo">359</span>          } else {<a name="line.359"></a>
+<span class="sourceLineNo">360</span>            super.userEventTriggered(ctx, evt);<a name="line.360"></a>
+<span class="sourceLineNo">361</span>          }<a name="line.361"></a>
+<span class="sourceLineNo">362</span>        }<a name="line.362"></a>
+<span class="sourceLineNo">363</span><a name="line.363"></a>
+<span class="sourceLineNo">364</span>        @Override<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.365"></a>
+<span class="sourceLineNo">366</span>          promise.tryFailure(cause);<a name="line.366"></a>
+<span class="sourceLineNo">367</span>        }<a name="line.367"></a>
+<span class="sourceLineNo">368</span>      });<a name="line.368"></a>
+<span class="sourceLineNo">369</span>  }<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>  private static void requestWriteBlock(Channel channel, StorageType storageType,<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.372"></a>
+<span class="sourceLineNo">373</span>    OpWriteBlockProto proto =<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    int protoLen = proto.getSerializedSize();<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    ByteBuf buffer =<a name="line.376"></a>
+<span class="sourceLineNo">377</span>      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.379"></a>
+<span class="sourceLineNo">380</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    channel.writeAndFlush(buffer);<a name="line.381"></a>
+<span class="sourceLineNo">382</span>  }<a name="line.382"></a>
+<span class="sourceLineNo">383</span><a name="line.383"></a>
+<span class="sourceLineNo">384</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.384"></a>
+<span class="sourceLineNo">385</span>      StorageType storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      throws IOException {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.388"></a>
+<span class="sourceLineNo">389</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.389"></a>
+<span class="sourceLineNo">390</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.390"></a>
+<span class="sourceLineNo">391</span><a name="line.391"></a>
+<span class="sourceLineNo">392</span>      @Override<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.393"></a>
+<span class="sourceLineNo">394</span>        if (future.isSuccess()) {<a name="line.394"></a>
+<span class="sourceLineNo">395</span>          // setup response processing pipeline first, then send request.<a name="line.395"></a>
+<span class="sourceLineNo">396</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.396"></a>
+<span class="sourceLineNo">397</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.397"></a>
+<span class="sourceLineNo">398</span>        } else {<a name="line.398"></a>
+<span class="sourceLineNo">399</span>          promise.tryFailure(future.cause());<a name="line.399"></a>
+<span class="sourceLineNo">400</span>        }<a name="line.400"></a>
+<span class="sourceLineNo">401</span>      }<a name="line.401"></a>
+<span class="sourceLineNo">402</span>    });<a name="line.402"></a>
+<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.405"></a>
+<span class="sourceLineNo">406</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.406"></a>
+<span class="sourceLineNo">407</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.407"></a>
+<span class="sourceLineNo">408</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.408"></a>
+<span class="sourceLineNo">409</span>    StorageType[] storageTypes = locatedBlock.getStorageTypes();<a name="line.409"></a>
+<span class="sourceLineNo">410</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.410"></a>
+<span class="sourceLineNo">411</span>    boolean connectToDnViaHostname =<a name="line.411"></a>
+<span class="sourceLineNo">412</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.412"></a>
+<span class="sourceLineNo">413</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.413"></a>
+<span class="sourceLineNo">414</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.414"></a>
+<span class="sourceLineNo">415</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.415"></a>
+<span class="sourceLineNo">416</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.416"></a>
+<span class="sourceLineNo">417</span>      .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PBHelperClient.convert(blockCopy))<a name="line.417"></a>
+<span class="sourceLineNo">418</span>        .setToken(PBHelperClient.convert(locatedBlock.getBlockToken())))<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      .setClientName(clientName).build();<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.420"></a>
+<span class="sourceLineNo">421</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.421"></a>
+<span class="sourceLineNo">422</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.422"></a>
+<span class="sourceLineNo">423</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.423"></a>
+<span class="sourceLineNo">424</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.424"></a>
+<span class="sourceLineNo">425</span>        .setRequestedChecksum(checksumProto)<a name="line.425"></a>
+<span class="sourceLineNo">426</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.428"></a>
+<span class="sourceLineNo">429</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.429"></a>
+<span class="sourceLineNo">430</span>      StorageType storageType = storageTypes[i];<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.431"></a>
+<span class="sourceLineNo">432</span>      futureList.add(promise);<a name="line.432"></a>
+<span class="sourceLineNo">433</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.433"></a>
+<span class="sourceLineNo">434</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.434"></a>
+<span class="sourceLineNo">435</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.435"></a>
 <span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>      @Override<a name="line.437"></a>
-<span class="sourceLineNo">438</span>      public TokenProto convert(Token&lt;?&gt; tok) {<a name="line.438"></a>
-<span class="sourceLineNo">439</span>        try {<a name="line.439"></a>
-<span class="sourceLineNo">440</span>          return (TokenProto) convertTokenMethod.invoke(null, tok);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>          throw new RuntimeException(e);<a name="line.442"></a>
-<span class="sourceLineNo">443</span>        }<a name="line.443"></a>
-<span class="sourceLineNo">444</span>      }<a name="line.444"></a>
-<span class="sourceLineNo">445</span>    };<a name="line.445"></a>
-<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>  private static ChecksumCreater createChecksumCreater28(Method getConfMethod, Class&lt;?&gt; confClass)<a name="line.448"></a>
-<span class="sourceLineNo">449</span>      throws NoSuchMethodException {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    for (Method method : confClass.getMethods()) {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      if (method.getName().equals("createChecksum")) {<a name="line.451"></a>
-<span class="sourceLineNo">452</span>        Method createChecksumMethod = method;<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        return new ChecksumCreater() {<a name="line.453"></a>
-<span class="sourceLineNo">454</span><a name="line.454"></a>
-<span class="sourceLineNo">455</span>          @Override<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          public DataChecksum createChecksum(DFSClient client) {<a name="line.456"></a>
-<span class="sourceLineNo">457</span>            try {<a name="line.457"></a>
-<span class="sourceLineNo">458</span>              return (DataChecksum) createChecksumMethod.invoke(getConfMethod.invoke(client),<a name="line.458"></a>
-<span class="sourceLineNo">459</span>                (Object) null);<a name="line.459"></a>
-<span class="sourceLineNo">460</span>            } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.460"></a>
-<span class="sourceLineNo">461</span>              throw new RuntimeException(e);<a name="line.461"></a>
-<span class="sourceLineNo">462</span>            }<a name="line.462"></a>
-<span class="sourceLineNo">463</span>          }<a name="line.463"></a>
-<span class="sourceLineNo">464</span>        };<a name="line.464"></a>
-<span class="sourceLineNo">465</span>      }<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    }<a name="line.466"></a>
-<span class="sourceLineNo">467</span>    throw new NoSuchMethodException("Can not find createChecksum method in DfsClientConf");<a name="line.467"></a>
-<span class="sourceLineNo">468</span>  }<a name="line.468"></a>
-<span class="sourceLineNo">469</span><a name="line.469"></a>
-<span class="sourceLineNo">470</span>  private static ChecksumCreater createChecksumCreater27(Method getConfMethod, Class&lt;?&gt; confClass)<a name="line.470"></a>
-<span class="sourceLineNo">471</span>      throws NoSuchMethodException {<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    Method createChecksumMethod = confClass.getDeclaredMethod("createChecksum");<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    createChecksumMethod.setAccessible(true);<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    return new ChecksumCreater() {<a name="line.474"></a>
-<span class="sourceLineNo">475</span><a name="line.475"></a>
-<span class="sourceLineNo">476</span>      @Override<a name="line.476"></a>
-<span class="sourceLineNo">477</span>      public DataChecksum createChecksum(DFSClient client) {<a name="line.477"></a>
-<span class="sourceLineNo">478</span>        try {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>          return (DataChecksum) createChecksumMethod.invoke(getConfMethod.invoke(client));<a name="line.479"></a>
-<span class="sourceLineNo">480</span>        } catch (IllegalAccessException | InvocationTargetException e) {<a name="line.480"></a>
-<span class="sourceLineNo">481</span>          throw new RuntimeException(e);<a name="line.481"></a>
-<span class="sourceLineNo">482</span>        }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      }<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    };<a name="line.484"></a>
-<span class="sourceLineNo">485</span>  }<a name="line.485"></a>
-<span class="sourceLineNo">486</span><a name="line.486"></a>
-<span class="sourceLineNo">487</span>  private static ChecksumCreater createChecksumCreater()<a name="line.487"></a>
-<span class="sourceLineNo">488</span>      throws NoSuchMethodException, ClassNotFoundException {<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    Method getConfMethod = DFSClient.class.getMethod("getConf");<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    try {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>      return createChecksumCreater28(getConfMethod,<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        Class.forName("org.apache.hadoop.hdfs.client.impl.DfsClientConf"));<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    } catch (ClassNotFoundException e) {<a name="line.493"></a>
-<span class="sourceLineNo">494</span>      LOG.debug("No DfsClientConf class found, should be hadoop 2.7-", e);<a name="line.494"></a>
-<span class="sourceLineNo">495</span>    }<a name="line.495"></a>
-<span class="sourceLineNo">496</span>    return createChecksumCreater27(getConfMethod,<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      Class.forName("org.apache.hadoop.hdfs.DFSClient$Conf"));<a name="line.497"></a>
-<span class="sourceLineNo">498</span>  }<a name="line.498"></a>
-<span class="sourceLineNo">499</span><a name="line.499"></a>
-<span class="sourceLineNo">500</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.501"></a>
-<span class="sourceLineNo">502</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      CryptoProtocolVersion[].class, String.class);<a name="line.503"></a>
-<span class="sourceLineNo">504</span><a name="line.504"></a>
-<span class="sourceLineNo">505</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        supportedVersions) -&gt; {<a name="line.506"></a>
-<span class="sourceLineNo">507</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.507"></a>
-<span class="sourceLineNo">508</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.508"></a>
-<span class="sourceLineNo">509</span>    };<a name="line.509"></a>
-<span class="sourceLineNo">510</span>  }<a name="line.510"></a>
-<span class="sourceLineNo">511</span><a name="line.511"></a>
-<span class="sourceLineNo">512</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.512"></a>
-<span class="sourceLineNo">513</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      CryptoProtocolVersion[].class);<a name="line.515"></a>
-<span class="sourceLineNo">516</span><a name="line.516"></a>
-<span class="sourceLineNo">517</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.517"></a>
-<span class="sourceLineNo">518</span>        supportedVersions) -&gt; {<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.519"></a>
-<span class="sourceLineNo">520</span>        createParent, replication, blockSize, supportedVersions);<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    };<a name="line.521"></a>
-<span class="sourceLineNo">522</span>  }<a name="line.522"></a>
-<span class="sourceLineNo">523</span><a name="line.523"></a>
-<span class="sourceLineNo">524</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>    try {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>      return createFileCreator3();<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    } catch (NoSuchMethodException e) {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    }<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    return createFileCreator2();<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  }<a name="line.531"></a>
-<span class="sourceLineNo">532</span><a name="line.532"></a>
-<span class="sourceLineNo">533</span>  // cancel the processing if DFSClient is already closed.<a name="line.533"></a>
-<span class="sourceLineNo">534</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.534"></a>
-<span class="sourceLineNo">535</span><a name="line.535"></a>
-<span class="sourceLineNo">536</span>    private final DFSClient client;<a name="line.536"></a>
-<span class="sourceLineNo">537</span><a name="line.537"></a>
-<span class="sourceLineNo">538</span>    public CancelOnClose(DFSClient client) {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>      this.client = client;<a name="line.539"></a>
-<span class="sourceLineNo">540</span>    }<a name="line.540"></a>
-<span class="sourceLineNo">541</span><a name="line.541"></a>
-<span class="sourceLineNo">542</span>    @Override<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    public boolean progress() {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    }<a name="line.545"></a>
-<span class="sourceLineNo">546</span>  }<a name="line.546"></a>
-<span class="sourceLineNo">547</span><a name="line.547"></a>
-<span class="sourceLineNo">548</span>  static {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    try {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>      PIPELINE_ACK_STATUS_GETTER = createPipelineAckStatusGetter();<a name="line.550"></a>
-<span class="sourceLineNo">551</span>      STORAGE_TYPE_SETTER = createStorageTypeSetter();<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      BLOCK_ADDER = createBlockAdder();<a name="line.552"></a>
-<span class="sourceLineNo">553</span>      LEASE_MANAGER = createLeaseManager();<a name="line.553"></a>
-<span class="sourceLineNo">554</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      PB_HELPER = createPBHelper();<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      CHECKSUM_CREATER = createChecksumCreater();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>      FILE_CREATOR = createFileCreator();<a name="line.557"></a>
-<span class="sourceLineNo">558</span>    } catch (Exception e) {<a name="line.558"></a>
-<span class="sourceLineNo">559</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.559"></a>
-<span class="sourceLineNo">560</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.560"></a>
-<span class="sourceLineNo">561</span>          "HBASE-16110 for more information.";<a name="line.561"></a>
-<span class="sourceLineNo">562</span>      LOG.error(msg, e);<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      throw new Error(msg, e);<a name="line.563"></a>
-<span class="sourceLineNo">564</span>    }<a name="line.564"></a>
-<span class="sourceLineNo">565</span>  }<a name="line.565"></a>
-<span class="sourceLineNo">566</span><a name="line.566"></a>
-<span class="sourceLineNo">567</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.568"></a>
-<span class="sourceLineNo">569</span>  }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.572"></a>
-<span class="sourceLineNo">573</span>  }<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    return CHECKSUM_CREATER.createChecksum(client);<a name="line.576"></a>
-<span class="sourceLineNo">577</span>  }<a name="line.577"></a>
-<span class="sourceLineNo">578</span><a name="line.578"></a>
-<span class="sourceLineNo">579</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    return PIPELINE_ACK_STATUS_GETTER.get(ack);<a name="line.580"></a>
-<span class="sourceLineNo">581</span>  }<a name="line.581"></a>
-<span class="sourceLineNo">582</span><a name="line.582"></a>
-<span class="sourceLineNo">583</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.583"></a>
-<span class="sourceLineNo">584</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.584"></a>
-<span class="sourceLineNo">585</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.585"></a>
-<span class="sourceLineNo">586</span>      new ProtobufVarint32FrameDecoder(),<a name="line.586"></a>
-<span class="sourceLineNo">587</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.587"></a>
-<span class="sourceLineNo">588</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>        @Override<a name="line.590"></a>
-<span class="sourceLineNo">591</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.591"></a>
-<span class="sourceLineNo">592</span>            throws Exception {<a name="line.592"></a>
-<span class="sourceLineNo">593</span>          Status pipelineStatus = resp.getStatus();<a name="line.593"></a>
-<span class="sourceLineNo">594</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.595"></a>
-<span class="sourceLineNo">596</span>          }<a name="line.596"></a>
-<span class="sourceLineNo">597</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.597"></a>
-<span class="sourceLineNo">598</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.599"></a>
-<span class="sourceLineNo">600</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.600"></a>
-<span class="sourceLineNo">601</span>                  resp.getMessage() + ", " + logInfo);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>            } else {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.603"></a>
-<span class="sourceLineNo">604</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>            }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>          }<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          // success<a name="line.607"></a>
-<span class="sourceLineNo">608</span>          ChannelPipeline p = ctx.pipeline();<a name="line.608"></a>
-<span class="sourceLineNo">609</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.610"></a>
-<span class="sourceLineNo">611</span>            // of pipeline.<a name="line.611"></a>
-<span class="sourceLineNo">612</span>            if (handler instanceof IdleStateHandler) {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>              break;<a name="line.613"></a>
-<span class="sourceLineNo">614</span>            }<a name="line.614"></a>
-<span class="sourceLineNo">615</span>          }<a name="line.615"></a>
-<span class="sourceLineNo">616</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.616"></a>
-<span class="sourceLineNo">617</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.617"></a>
-<span class="sourceLineNo">618</span>          ctx.channel().config().setAutoRead(false);<a name="line.618"></a>
-<span class="sourceLineNo">619</span>          promise.trySuccess(ctx.channel());<a name="line.619"></a>
-<span class="sourceLineNo">620</span>        }<a name="line.620"></a>
-<span class="sourceLineNo">621</span><a name="line.621"></a>
-<span class="sourceLineNo">622</span>        @Override<a name="line.622"></a>
-<span class="sourceLineNo">623</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.624"></a>
-<span class="sourceLineNo">625</span>        }<a name="line.625"></a>
-<span class="sourceLineNo">626</span><a name="line.626"></a>
-<span class="sourceLineNo">627</span>        @Override<a name="line.627"></a>
-<span class="sourceLineNo">628</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.628"></a>
-<span class="sourceLineNo">629</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.629"></a>
-<span class="sourceLineNo">630</span>            promise<a name="line.630"></a>
-<span class="sourceLineNo">631</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          } else {<a name="line.632"></a>
-<span class="sourceLineNo">633</span>            super.userEventTriggered(ctx, evt);<a name="line.633"></a>
-<span class="sourceLineNo">634</span>          }<a name="line.634"></a>
-<span class="sourceLineNo">635</span>        }<a name="line.635"></a>
-<span class="sourceLineNo">636</span><a name="line.636"></a>
-<span class="sourceLineNo">637</span>        @Override<a name="line.637"></a>
-<span class="sourceLineNo">638</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.638"></a>
-<span class="sourceLineNo">639</span>          promise.tryFailure(cause);<a name="line.639"></a>
-<span class="sourceLineNo">640</span>        }<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      });<a name="line.641"></a>
-<span class="sourceLineNo">642</span>  }<a name="line.642"></a>
-<span class="sourceLineNo">643</span><a name="line.643"></a>
-<span class="sourceLineNo">644</span>  private static void requestWriteBlock(Channel channel, Enum&lt;?&gt; storageType,<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.645"></a>
-<span class="sourceLineNo">646</span>    OpWriteBlockProto proto = STORAGE_TYPE_SETTER.set(writeBlockProtoBuilder, storageType).build();<a name="line.646"></a>
-<span class="sourceLineNo">647</span>    int protoLen = proto.getSerializedSize();<a name="line.647"></a>
-<span class="sourceLineNo">648</span>    ByteBuf buffer =<a name="line.648"></a>
-<span class="sourceLineNo">649</span>        channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.650"></a>
-<span class="sourceLineNo">651</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.651"></a>
-<span class="sourceLineNo">652</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    channel.writeAndFlush(buffer);<a name="line.653"></a>
-<span class="sourceLineNo">654</span>  }<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.656"></a>
-<span class="sourceLineNo">657</span>      Enum&lt;?&gt; storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      throws IOException {<a name="line.659"></a>
-<span class="sourceLineNo">660</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.662"></a>
-<span class="sourceLineNo">663</span><a name="line.663"></a>
-<span class="sourceLineNo">664</span>      @Override<a name="line.664"></a>
-<span class="sourceLineNo">665</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        if (future.isSuccess()) {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>          // setup response processing pipeline first, then send request.<a name="line.667"></a>
-<span class="sourceLineNo">668</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.669"></a>
-<span class="sourceLineNo">670</span>        } else {<a name="line.670"></a>
-<span class="sourceLineNo">671</span>          promise.tryFailure(future.cause());<a name="line.671"></a>
-<span class="sourceLineNo">672</span>        }<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    });<a name="line.674"></a>
-<span class="sourceLineNo">675</span>  }<a name="line.675"></a>
-<span class="sourceLineNo">676</span><a name="line.676"></a>
-<span class="sourceLineNo">677</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.677"></a>
-<span class="sourceLineNo">678</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.678"></a>
-<span class="sourceLineNo">679</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.680"></a>
-<span class="sourceLineNo">681</span>    Enum&lt;?&gt;[] storageTypes = locatedBlock.getStorageTypes();<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.682"></a>
-<span class="sourceLineNo">683</span>    boolean connectToDnViaHostname =<a name="line.683"></a>
-<span class="sourceLineNo">684</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.684"></a>
-<span class="sourceLineNo">685</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.686"></a>
-<span class="sourceLineNo">687</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.687"></a>
-<span class="sourceLineNo">688</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.688"></a>
-<span class="sourceLineNo">689</span>        .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PB_HELPER.convert(blockCopy))<a name="line.689"></a>
-<span class="sourceLineNo">690</span>            .setToken(PB_HELPER.convert(locatedBlock.getBlockToken())))<a name="line.690"></a>
-<span class="sourceLineNo">691</span>        .setClientName(clientName).build();<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.693"></a>
-<span class="sourceLineNo">694</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.694"></a>
-<span class="sourceLineNo">695</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.695"></a>
-<span class="sourceLineNo">696</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.696"></a>
-<span class="sourceLineNo">697</span>        .setRequestedChecksum(checksumProto)<a name="line.697"></a>
-<span class="sourceLineNo">698</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.700"></a>
-<span class="sourceLineNo">701</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.701"></a>
-<span class="sourceLineNo">702</span>      Enum&lt;?&gt; storageType = storageTypes[i];<a name="line.702"></a>
-<span class="sourceLineNo">703</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.703"></a>
-<span class="sourceLineNo">704</span>      futureList.add(promise);<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.705"></a>
... 22484 lines suppressed ...