Posted to commits@hbase.apache.org by gi...@apache.org on 2020/03/17 14:55:21 UTC

[hbase-site] branch asf-site updated: Published site at 60de4c2e31fee195311a2b99846e8d69af5b2985.

This is an automated email from the ASF dual-hosted git repository.

git-site-role pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/hbase-site.git


The following commit(s) were added to refs/heads/asf-site by this push:
     new e33538a  Published site at 60de4c2e31fee195311a2b99846e8d69af5b2985.
e33538a is described below

commit e33538af24036a03940d769cad05ce07ae06373e
Author: jenkins <bu...@apache.org>
AuthorDate: Tue Mar 17 14:54:50 2020 +0000

    Published site at 60de4c2e31fee195311a2b99846e8d69af5b2985.
---
 acid-semantics.html                                |   2 +-
 apache_hbase_reference_guide.pdf                   |   4 +-
 book.html                                          |   2 +-
 bulk-loads.html                                    |   2 +-
 checkstyle-aggregate.html                          |   4 +-
 coc.html                                           |   2 +-
 dependencies.html                                  |   2 +-
 dependency-convergence.html                        | 580 ++++++++-------
 dependency-info.html                               |   2 +-
 dependency-management.html                         | 104 +--
 devapidocs/constant-values.html                    |   4 +-
 devapidocs/index-all.html                          |   2 +
 .../apache/hadoop/hbase/backup/package-tree.html   |   4 +-
 .../apache/hadoop/hbase/client/package-tree.html   |  20 +-
 .../hadoop/hbase/coprocessor/package-tree.html     |   2 +-
 .../apache/hadoop/hbase/executor/package-tree.html |   2 +-
 .../apache/hadoop/hbase/filter/package-tree.html   |   8 +-
 .../org/apache/hadoop/hbase/fs/HFileSystem.html    |   8 +-
 .../hadoop/hbase/hbtop/field/package-tree.html     |   2 +-
 .../hadoop/hbase/hbtop/terminal/package-tree.html  |   2 +-
 .../org/apache/hadoop/hbase/http/package-tree.html |   2 +-
 ...OneBlockAsyncDFSOutputHelper.CancelOnClose.html |   8 +-
 ...lockAsyncDFSOutputHelper.NameNodeException.html |   6 +-
 .../FanOutOneBlockAsyncDFSOutputHelper.html        |  72 +-
 ...utOneBlockAsyncDFSOutputHelper.FileCreator.html |   4 +
 .../bucket/PersistentIOEngine.DuFileCommand.html   |   4 +-
 .../apache/hadoop/hbase/io/hfile/package-tree.html |   8 +-
 .../org/apache/hadoop/hbase/ipc/package-tree.html  |   2 +-
 .../hadoop/hbase/mapreduce/package-tree.html       |   2 +-
 .../apache/hadoop/hbase/master/package-tree.html   |   6 +-
 .../hbase/master/procedure/package-tree.html       |   6 +-
 .../org/apache/hadoop/hbase/package-tree.html      |  16 +-
 .../hadoop/hbase/procedure2/package-tree.html      |   6 +-
 .../apache/hadoop/hbase/quotas/package-tree.html   |   4 +-
 .../hadoop/hbase/regionserver/package-tree.html    |  18 +-
 .../regionserver/querymatcher/package-tree.html    |   2 +-
 .../hbase/regionserver/wal/package-tree.html       |   4 +-
 .../hadoop/hbase/rest/model/package-tree.html      |   2 +-
 .../hadoop/hbase/security/access/package-tree.html |   4 +-
 .../apache/hadoop/hbase/security/package-tree.html |   2 +-
 .../org/apache/hadoop/hbase/util/package-tree.html |  12 +-
 .../src-html/org/apache/hadoop/hbase/Version.html  |   4 +-
 ...OneBlockAsyncDFSOutputHelper.CancelOnClose.html | 780 +++++++++++----------
 ...BlockAsyncDFSOutputHelper.DFSClientAdaptor.html | 780 +++++++++++----------
 ...utOneBlockAsyncDFSOutputHelper.FileCreator.html | 780 +++++++++++----------
 ...tOneBlockAsyncDFSOutputHelper.LeaseManager.html | 780 +++++++++++----------
 ...lockAsyncDFSOutputHelper.NameNodeException.html | 780 +++++++++++----------
 .../FanOutOneBlockAsyncDFSOutputHelper.html        | 780 +++++++++++----------
 downloads.html                                     |   2 +-
 export_control.html                                |   2 +-
 index.html                                         |   2 +-
 issue-tracking.html                                |   2 +-
 mail-lists.html                                    |   2 +-
 metrics.html                                       |   2 +-
 old_news.html                                      |   2 +-
 plugin-management.html                             |   6 +-
 plugins.html                                       |   2 +-
 poweredbyhbase.html                                |   2 +-
 project-info.html                                  |   2 +-
 project-reports.html                               |   2 +-
 project-summary.html                               |   2 +-
 pseudo-distributed.html                            |   2 +-
 replication.html                                   |   2 +-
 resources.html                                     |   2 +-
 source-repository.html                             |   2 +-
 sponsors.html                                      |   2 +-
 supportingprojects.html                            |   2 +-
 team-list.html                                     |   2 +-
 .../hbase/HBaseClusterManager.RemoteShell.html     |   4 +-
 .../hbase/HBaseClusterManager.RemoteSudoShell.html |   4 +-
 .../hadoop/hbase/chaos/actions/package-tree.html   |   2 +-
 .../http/TestHttpServer.MyGroupsProvider.html      |  23 +-
 .../org/apache/hadoop/hbase/http/package-tree.html |  14 +-
 .../io/TestFileLink.MyDistributedFileSystem.html   |  10 +-
 .../apache/hadoop/hbase/io/hfile/package-tree.html |   2 +-
 .../org/apache/hadoop/hbase/package-tree.html      |  12 +-
 .../hadoop/hbase/procedure/package-tree.html       |   8 +-
 .../hadoop/hbase/procedure2/package-tree.html      |   2 +-
 .../TestFSErrorsExposed.FaultyFileSystem.html      |   8 +-
 .../TestFSErrorsExposed.FaultyInputStream.html     |  26 +-
 .../TestHRegionFileSystem.MockFileSystem.html      |   6 +-
 ...tHRegionFileSystem.MockFileSystemForCreate.html |   6 +-
 .../regionserver/TestHStore.FaultyFileSystem.html  |   8 +-
 .../TestHStore.FaultyOutputStream.html             |  26 +-
 .../hadoop/hbase/regionserver/package-tree.html    |  10 +-
 ...ssController.MyShellBasedUnixGroupsMapping.html |  27 +-
 .../hadoop/hbase/security/access/package-tree.html |  14 +-
 .../org/apache/hadoop/hbase/test/package-tree.html |   2 +-
 ...DFSUtils.IsFileClosedDistributedFileSystem.html |  10 +-
 ...Utils.AlwaysFailSetStoragePolicyFileSystem.html |  10 +-
 .../apache/hadoop/hbase/util/TestFutureUtils.html  |  14 +-
 .../org/apache/hadoop/hbase/wal/package-tree.html  |   2 +-
 testdevapidocs/overview-tree.html                  |  16 +-
 .../apache/hadoop/hbase/http/log/TestLogLevel.html |   2 +-
 .../apache/hadoop/hbase/util/TestFutureUtils.html  | 137 ++--
 95 files changed, 3204 insertions(+), 2877 deletions(-)
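
For readers who want to inspect this publish locally, the summary above can be
reproduced from the repository named in the header. A minimal sketch, assuming
git is installed and gitbox.apache.org is reachable (e33538a abbreviates the
full commit hash shown above):

    # Fetch the published-site branch and reproduce the file-change summary.
    git clone --branch asf-site https://gitbox.apache.org/repos/asf/hbase-site.git
    cd hbase-site
    git show --stat e33538af24036a03940d769cad05ce07ae06373e

    # Limit the diff to a single page, e.g. the dependency convergence report:
    git show e33538af24036a03940d769cad05ce07ae06373e -- dependency-convergence.html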

diff --git a/acid-semantics.html b/acid-semantics.html
index a8af24c..aaaa732 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -467,7 +467,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 128af7c..b71ffd3 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.rc.2, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20200315153503+00'00')
-/CreationDate (D:20200315155032+00'00')
+/ModDate (D:20200317143942+00'00')
+/CreationDate (D:20200317145211+00'00')
 >>
 endobj
 2 0 obj
diff --git a/book.html b/book.html
index 05d7214..a76ad32 100644
--- a/book.html
+++ b/book.html
@@ -44952,7 +44952,7 @@ org/apache/hadoop/hbase/security/access/AccessControlClient.revoke:(Lorg/apache/
 <div id="footer">
 <div id="footer-text">
 Version 3.0.0-SNAPSHOT<br>
-Last updated 2020-03-15 15:35:03 UTC
+Last updated 2020-03-17 14:39:42 UTC
 </div>
 </div>
 <script type="text/x-mathjax-config">
diff --git a/bulk-loads.html b/bulk-loads.html
index 833198b..8dc948e 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -172,7 +172,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 6240bab..d577fb2 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -30897,7 +30897,7 @@
 <td>coding</td>
 <td>InnerAssignment</td>
 <td>Inner assignments should be avoided.</td>
-<td>336</td></tr></table></div>
+<td>354</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.io.compress.Compression.java">org/apache/hadoop/hbase/io/compress/Compression.java</h3>
 <table border="0" class="table table-striped">
@@ -81516,7 +81516,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/coc.html b/coc.html
index 477aecd..5973871 100644
--- a/coc.html
+++ b/coc.html
@@ -241,7 +241,7 @@ email to <a class="externalLink" href="mailto:private@hbase.apache.org">the priv
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependencies.html b/dependencies.html
index 8f26713..c2cfcf4 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -313,7 +313,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-convergence.html b/dependency-convergence.html
index ee61d17..994c220 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -170,58 +170,58 @@
 <td>42</td></tr>
 <tr class="a">
 <th>Number of dependencies (NOD):</th>
-<td>296</td></tr>
+<td>305</td></tr>
 <tr class="b">
 <th>Number of unique artifacts (NOA):</th>
-<td>304</td></tr>
+<td>315</td></tr>
 <tr class="a">
 <th>Number of version-conflicting artifacts (NOC):</th>
-<td>8</td></tr>
+<td>10</td></tr>
 <tr class="b">
 <th>Number of SNAPSHOT artifacts (NOS):</th>
 <td>0</td></tr>
 <tr class="a">
 <th>Convergence (NOD/NOA):</th>
-<td><img alt="[Error]" src="images/icon_error_sml.gif" />&#160;<b>97 %</b></td></tr>
+<td><img alt="[Error]" src="images/icon_error_sml.gif" />&#160;<b>96 %</b></td></tr>
 <tr class="b">
 <th>Ready for release (100% convergence and no SNAPSHOTS):</th>
 <td><img alt="[Error]" src="images/icon_error_sml.gif" />&#160;<b>Error</b><br />You do not have 100% convergence.</td></tr></table></div>
 <div class="section">
 <h3><a name="Dependencies_used_in_modules"></a>Dependencies used in modules</h3>
 <div class="section">
-<h4><a name="com.google.code.findbugs:jsr305"></a>com.google.code.findbugs:jsr305</h4>
+<h4><a name="com.fasterxml.woodstox:woodstox-core"></a>com.fasterxml.woodstox:woodstox-core</h4>
 <table border="0" class="table table-striped">
 <tr class="a">
 <td><img alt="[Error]" src="images/icon_error_sml.gif" /></td>
 <td>
 <table border="0" class="table table-striped">
 <tr class="b">
-<td width="25%">1.3.9</td>
+<td width="25%">5.0.3</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;com.google.guava:guava:jar:11.0.2:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(com.google.code.findbugs:jsr305:jar:1.3.9:compile - omitted for conflict with 3.0.0)<br /></li><br /></ol></td></tr>
+<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;com.fasterxml.woodstox:woodstox-core:jar:5.0.3:compile<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160 [...]
 <tr class="a">
-<td width="25%">3.0.0</td>
+<td width="25%">5.1.0</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;com.google.code.findbugs:jsr305:jar:3.0.0:compile<br /></li><br /></ol></td></tr></table></td></tr></table></div>
+<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />\-&#160;com.sun.xml.ws:jaxws-ri:pom:2.3.2:compile<br />&#160;&#160;&#160;\-&#160;com.sun.xml.ws:jaxws-rt:jar:2.3.2:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;com.sun.xml.ws:policy:jar:2.7.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;(com.fasterxml.woodstox:woodstox-core:jar:5.1.0:runtime - omitted for conflict with 5.0.3)<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(com.fasterxml.woo [...]
 <div class="section">
-<h4><a name="com.google.guava:guava"></a>com.google.guava:guava</h4>
+<h4><a name="com.google.code.findbugs:jsr305"></a>com.google.code.findbugs:jsr305</h4>
 <table border="0" class="table table-striped">
 <tr class="b">
 <td><img alt="[Error]" src="images/icon_error_sml.gif" /></td>
 <td>
 <table border="0" class="table table-striped">
 <tr class="a">
-<td width="25%">11.0.2</td>
+<td width="25%">1.3.9</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-client-jobclient:test-jar:tests:2.8.5:test<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.8.5:test<br />|&#160;&#160;|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-yarn-client:jar:2.8.5:test<br />|&#160;&#160;|&#160;&#160;|&#160;&#160;\-&#160;(com.google.guava:guava:jar:11.0.2:test - omitted for duplicate)<br />|&#160;&#160;|&#160;&#160;\-&#160;org.apac [...]
+<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;com.google.guava:guava:jar:11.0.2:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(com.google.code.findbugs:jsr305:jar:1.3.9:compile - omitted for conflict with 3.0.0)<br /></li><br /></ol></td></tr>
 <tr class="b">
-<td width="25%">16.0.1</td>
+<td width="25%">3.0.0</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.curator:curator-test:jar:2.7.1:test<br />&#160;&#160;&#160;&#160;&#160;&#160 [...]
+<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;com.google.code.findbugs:jsr305:jar:3.0.0:compile<br /></li><br /></ol></td></tr></table></td></tr></table></div>
 <div class="section">
 <h4><a name="com.nimbusds:nimbus-jose-jwt"></a>com.nimbusds:nimbus-jose-jwt</h4>
 <table border="0" class="table table-striped">
@@ -241,10 +241,10 @@
 <td width="25%">4.41.1</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-auth:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;com.nimbusds:nimbus-jose-jwt:jar:4.41.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-auth:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;com.nimbusds:nimbus-jose-jwt:jar:4.41.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-auth:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;com.nimbusds:nimbus-jose-jwt:jar:4.41.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-auth:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;com.nimbusds:nimbus-jose-jwt:jar:4.41.1:compile<br /></li><br /></ol></td></tr></table></td></tr></table></div>
+<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-auth:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;com.nimbusds:nimbus-jose-jwt:jar:4.41.1:compile<br /></li><br />
+<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-auth:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;com.nimbusds:nimbus-jose-jwt:jar:4.41.1:compile<br /></li><br />
+<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-auth:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;com.nimbusds:nimbus-jose-jwt:jar:4.41.1:compile<br /></li><br />
+<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-auth:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;com.nimbusds:nimbus-jose-jwt:jar:4.41.1:compile<br /></li><br /></ol></td></tr></table></td></tr></table></div>
 <div class="section">
 <h4><a name="commons-beanutils:commons-beanutils"></a>commons-beanutils:commons-beanutils</h4>
 <table border="0" class="table table-striped">
@@ -253,15 +253,43 @@
 <td>
 <table border="0" class="table table-striped">
 <tr class="a">
-<td width="25%">1.7.0</td>
+<td width="25%">1.9.2</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.7.0:compile - omitted for conflict with 1.9.2)<br /> [...]
+<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br /></li><br />
+<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br /></li><br />
+<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br /></li><br />
+<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br /></li><br />
+<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br /></li><br />
+<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br /></li><br />
+<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br /></li><br />
+<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br /></li><br />
+<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br /></li><br />
+<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160 [...]
+<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:test-jar:tests:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.2:compile - omitted for conflict with 1.9.4)<br /></li><br />
+<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br /></li><br />
+<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br /></li><br />
+<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br /></li><br /></ol></td></tr>
 <tr class="b">
-<td width="25%">1.9.2</td>
+<td width="25%">1.9.4</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:test-jar:tests:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br /></li><br /></ol></td></tr></table></td></tr></table></div>
+<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.4:test - omitted for conflict with 1.9.2)<br /></li><br />
+<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.4:test - omitted for conflict with 1.9.2)<br /></li><br />
+<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.4:test - omitted for conflict with 1.9.2)<br /></li><br />
+<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.4:test - omitted for conflict with 1.9.2)<br /></li><br />
+<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.4:test - omitted for conflict with 1.9.2)<br /></li><br />
+<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.4:test - omitted for conflict with 1.9.2)<br /></li><br />
+<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.4:test - omitted for conflict with 1.9.2)<br /></li><br />
+<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.4:test - omitted for conflict with 1.9.2)<br /></li><br />
+<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.4:test - omitted for conflict with 1.9.2)<br /></li><br />
+<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.4:test - omitted for conflict with 1.9.2)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.4:test - omitted for conflict with 1.9.2)<br /> [...]
+<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.4:compile<br /></li><br />
+<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.4:test - omitted for conflict with 1.9.2)<br /></li><br />
+<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.4:compile - omitted for conflict with 1.9.2)<br /></li><br />
+<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-beanutils:commons-beanutils:jar:1.9.4:test - omitted for conflict with 1.9.2)<br /></li><br /></ol></td></tr></table></td></tr></table></div>
 <div class="section">
 <h4><a name="commons-collections:commons-collections"></a>commons-collections:commons-collections</h4>
 <table border="0" class="table table-striped">
@@ -273,74 +301,74 @@
 <td width="25%">3.2.1</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />&#160;&#160;&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;& [...]
-<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:hadoop [...]
-<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br /></li><br />
-<li>org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:hadoop [...]
-<li>org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT<br />+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br [...]
-<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop:hado [...]
-<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop:hado [...]
-<li>org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache. [...]
-<li>org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#16 [...]
-<li>org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoo [...]
-<li>org.apache.hbase:hbase-hbtop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;& [...]
-<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop:hadoop-m [...]
-<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop:hadoop-com [...]
-<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:had [...]
-<li>org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />&#160;&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />&#160;&#160;&#160;\-&# [...]
-<li>org.apache.hbase:hbase-metrics:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />&#160;&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />&#160;&#160;&#160;\-&#160; [...]
-<li>org.apache.hbase:hbase-procedure:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:had [...]
-<li>org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:h [...]
-<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop:hadoop-m [...]
-<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop:hadoop [...]
-<li>org.apache.hbase:hbase-shaded-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-shaded-client-byo-hadoop:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#16 [...]
-<li>org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />&#160;&#160;&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;& [...]
-<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160 [...]
-<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.8.5:compile<br />|&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hbase:hbase-common:test-jar:tests:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validato [...]
-<li>org.apache.hbase:hbase-shaded-with-hadoop-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop:hadoop- [...]
-<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop: [...]
-<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:hadoop [...]
-<li>org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:had [...]
+<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />&#160;&#160;&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;& [...]
+<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:hadoop [...]
+<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br /></li><br />
+<li>org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:hadoop [...]
+<li>org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT<br />+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<b [...]
+<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop:hado [...]
+<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop:hado [...]
+<li>org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache. [...]
+<li>org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#16 [...]
+<li>org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoo [...]
+<li>org.apache.hbase:hbase-hbtop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;& [...]
+<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop:hadoop-m [...]
+<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop:hadoop-com [...]
+<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:had [...]
+<li>org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />&#160;&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />&#160;&#160;&#160;\-&# [...]
+<li>org.apache.hbase:hbase-metrics:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />&#160;&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />&#160;&#160;&#160;\-&#160; [...]
+<li>org.apache.hbase:hbase-procedure:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:had [...]
+<li>org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:h [...]
+<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop:hadoop-m [...]
+<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop:hadoop [...]
+<li>org.apache.hbase:hbase-shaded-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&# [...]
+<li>org.apache.hbase:hbase-shaded-client-byo-hadoop:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&# [...]
+<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#1 [...]
+<li>org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />&#160;&#160;&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&# [...]
+<li>org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;& [...]
+<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160 [...]
+<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-with-hadoop-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&# [...]
+<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop:hadoop- [...]
+<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />+-&#160;org.apache.hadoop: [...]
+<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:hadoop [...]
+<li>org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.2:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.1:compile - omitted for conflict with 3.2.2)<br />\-&#160;org.apache.hadoop:had [...]
 <tr class="a">
 <td width="25%">3.2.2</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-c [...]
-<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />|&#160; [...]
-<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />|&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.8.5:test<br />|&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:test - omitted for duplicate)<br />|&#160;&#160;&#160;&#160;&#160;\-&# [...]
-<li>org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br /></li><br />
-<li>org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT<br />+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br /></li><br />
-<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#160;o [...]
-<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />|&#16 [...]
-<li>org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compil [...]
-<li>org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160 [...]
-<li>org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br  [...]
-<li>org.apache.hbase:hbase-hbtop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;& [...]
-<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#160;org.a [...]
-<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />|&#160;&#16 [...]
-<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#160; [...]
-<li>org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2. [...]
-<li>org.apache.hbase:hbase-metrics:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:co [...]
-<li>org.apache.hbase:hbase-procedure:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br /></l [...]
-<li>org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />< [...]
-<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />|&#160;&# [...]
-<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#160;org [...]
-<li>org.apache.hbase:hbase-shaded-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-shaded-client-byo-hadoop:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.ha [...]
-<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />|&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.8.5:test<br />|&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:test - omitted for duplicate)<br />|&#160;&#160;&#160;&#160;&#1 [...]
-<li>org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />&#160;&#160;&#160;\-&#160;o [...]
-<li>org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop: [...]
-<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;|&#160;&#160; [...]
-<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.8.5:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />+-&#160;org.apache.hbase:hbase-common:test-jar:tests:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;|&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted f [...]
-<li>org.apache.hbase:hbase-shaded-with-hadoop-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#160;org. [...]
-<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#1 [...]
-<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#160;org [...]
-<li>org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br /></l [...]
+<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-c [...]
+<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />|&#160 [...]
+<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:test - omitted for duplicate)<br />|&#160;&#160;&#160;&#160;&#160;\- [...]
+<li>org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br /></li><br />
+<li>org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT<br />+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br /></li><br />
+<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#160; [...]
+<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />|&#1 [...]
+<li>org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compi [...]
+<li>org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160 [...]
+<li>org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br [...]
+<li>org.apache.hbase:hbase-hbtop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;& [...]
+<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#160;org. [...]
+<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />|&#160;&#1 [...]
+<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#160 [...]
+<li>org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2 [...]
+<li>org.apache.hbase:hbase-metrics:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;+-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:c [...]
+<li>org.apache.hbase:hbase-procedure:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br /></ [...]
+<li>org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br /> [...]
+<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />|&#160;& [...]
+<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#160;or [...]
+<li>org.apache.hbase:hbase-shaded-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&# [...]
+<li>org.apache.hbase:hbase-shaded-client-byo-hadoop:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.ha [...]
+<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:test - omitted for duplicate)<br />|&#160;&#160;&#160;&#160;& [...]
+<li>org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />&#160;&#160;&#160;\-&#160;o [...]
+<li>org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop: [...]
+<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;|&#160;&#160; [...]
+<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:compile<br />|&#160;&#160;+-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />|&#160;&#160;\-&#160;commons-beanutils:commons-beanutils:jar:1.9.4:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-common:test-jar:tests:3 [...]
+<li>org.apache.hbase:hbase-shaded-with-hadoop-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&# [...]
+<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#160;org [...]
+<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&# [...]
+<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br />\-&#160;or [...]
+<li>org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-collections:commons-collections:jar:3.2.2:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-collections:commons-collections:jar:3.2.2:compile<br /></ [...]
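The trees above show the same mediation in every module: the 3.2.1 copies of commons-collections arrive through commons-beanutils 1.9.2 and are marked "omitted for conflict with 3.2.2", while the 3.2.2 copies brought in by commons-validator 1.6 and hadoop-common win by Maven's nearest-wins resolution. One common way to make that outcome explicit rather than an accident of graph distance is a dependencyManagement pin. A minimal sketch follows; the coordinates are the ones reported above, but the surrounding pom.xml structure is an assumption for illustration, not the project's actual configuration:

    <!-- Hypothetical pom.xml fragment: declare one commons-collections
         version for the whole build so resolution no longer depends on
         which transitive path happens to be nearest. -->
    <dependencyManagement>
      <dependencies>
        <dependency>
          <groupId>commons-collections</groupId>
          <artifactId>commons-collections</artifactId>
          <version>3.2.2</version>
        </dependency>
      </dependencies>
    </dependencyManagement>

With such a pin in place, every "(commons-collections:commons-collections:jar:3.2.1 ... omitted for conflict with 3.2.2)" entry resolves to 3.2.2 by declaration, and the convergence report collapses to a single version row for the artifact.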
 <div class="section">
 <h4><a name="commons-digester:commons-digester"></a>commons-digester:commons-digester</h4>
 <table border="0" class="table table-striped">
@@ -352,70 +380,70 @@
 <td width="25%">1.8</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-hbtop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-metrics:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-procedure:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-shaded-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#16 [...]
-<li>org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160 [...]
-<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br /></li><br />
-<li>org.apache.hbase:hbase-shaded-with-hadoop-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
-<li>org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br /></ol></td></tr>
+<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#16 [...]
+<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:test - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-common:jar: [...]
+<li>org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br /></li><br />
+<li>org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
+<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:test - omitted for duplicate)<br /></li><br />
+<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:test - omitted for conflict with 1.8.1)<br />\-&#160;org.apache.hadoop:hadoop [...]
+<li>org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br /></li><br />
+<li>org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
+<li>org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br /></li><br />
+<li>org.apache.hbase:hbase-hbtop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br /></li><br />
+<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:test - omitted for duplicate)<br /></li><br />
+<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:test - omitted for conflict with 1.8.1)<br />\-&#160;org.apache.hadoop:hadoop-commo [...]
+<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:test - omitted for duplicate)<br /></li><br />
+<li>org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
+<li>org.apache.hbase:hbase-metrics:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for conflict with 1.8.1)<br /></li><br />
+<li>org.apache.hbase:hbase-procedure:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br /></li><br />
+<li>org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br /></li><br />
+<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:test - omitted for conflict with 1.8.1)<br />\-&#160;org.apache.hadoop:hadoop-com [...]
+<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:test - omitted for duplicate)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-client-byo-hadoop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:provided (scope not updated to compile)<br />&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:provided<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:provided (scope not updated to compile)<br />&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:provided<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br />\-&#160;org.apache.hbase:hbase-shaded-testing-u [...]
+<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br />\-&#160;org.apache.hbase:hbase-common:test-jar:tests:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted fo [...]
+<li>org.apache.hbase:hbase-shaded-with-hadoop-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br /></li><br />
+<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:test - omitted for duplicate)<br /></li><br />
+<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:compile - omitted for duplicate)<br />< [...]
+<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8:test - omitted for duplicate)<br /></li><br />
+<li>org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8:compile<br /></li><br /></ol></td></tr>
 <tr class="b">
 <td width="25%">1.8.1</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
+<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
 <li>org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT<br />\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
+<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
 <li>org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-hbtop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#1 [...]
-<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
+<li>org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-hbtop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#1 [...]
+<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
 <li>org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
 <li>org.apache.hbase:hbase-metrics:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-procedure:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-shaded-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#1 [...]
-<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160 [...]
-<li>org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160; [...]
+<li>org.apache.hbase:hbase-procedure:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#1 [...]
+<li>org.apache.hbase:hbase-shaded-client-byo-hadoop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160; [...]
 <li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:test-jar:tests:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
-<li>org.apache.hbase:hbase-shaded-with-hadoop-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#1 [...]
-<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br />
-<li>org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-digester:commons-digester:jar:1.8.1:compile<br /></li><br /></ol></td></tr></table></td></tr></table></div>
+<li>org.apache.hbase:hbase-shaded-with-hadoop-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#1 [...]
+<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br />
+<li>org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;commons-validator:commons-validator:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-digester:commons-digester:jar:1.8.1:compile - omitted for conflict with 1.8)<br /></li><br /></ol></td></tr></table></td></tr></table></div>
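Note on reading the commons-digester tables above: these trees mirror what a verbose Maven dependency analysis reports. For each module Maven mediates to a single version ("nearest wins"), and the losing candidate is shown as "(... - omitted for conflict with ...)"; here the 1.8 copy arrives via hadoop-common while the 1.8.1 copy arrives via commons-validator. As a minimal, purely illustrative sketch (this snippet is hypothetical and is not taken from the HBase build), such a split is commonly pinned to one version in a parent POM's dependencyManagement:

    <!-- Illustrative only: pin a single commons-digester version so every
         module resolves the same artifact regardless of which path wins. -->
    <dependencyManagement>
      <dependencies>
        <dependency>
          <groupId>commons-digester</groupId>
          <artifactId>commons-digester</artifactId>
          <version>1.8.1</version>
        </dependency>
      </dependencies>
    </dependencyManagement>

With a managed version in place, both the hadoop-common path (1.8) and the commons-validator path (1.8.1) would resolve to the managed artifact, and a dependency-convergence check would no longer flag this library.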
 <div class="section">
 <h4><a name="commons-lang:commons-lang"></a>commons-lang:commons-lang</h4>
 <table border="0" class="table table-striped">
@@ -427,121 +455,191 @@
 <td width="25%">2.4</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-hbtop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-metrics:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-procedure:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-shaded-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-shaded-client-byo-hadoop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:provided (scope not updated to compile)<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:provided<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:provided - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#16 [...]
-<li>org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:provided (scope not updated to compile)<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:provided<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:provided - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160 [...]
-<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-shaded-with-hadoop-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
-<li>org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br /></ol></td></tr>
+<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;& [...]
+<li>org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-hbtop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;& [...]
+<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-metrics:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-procedure:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;& [...]
+<li>org.apache.hbase:hbase-shaded-client-byo-hadoop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:provided (scope not updated to compile)<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:provided<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:provided - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#1 [...]
+<li>org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:provided (scope not updated to compile)<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:provided<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:provided - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#16 [...]
+<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-with-hadoop-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;& [...]
+<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br />
+<li>org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-configuration:commons-configuration:jar:1.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.4:compile - omitted for conflict with 2.6)<br /></li><br /></ol></td></tr>
 <tr class="a">
 <td width="25%">2.6</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />|&#160;&#160;|&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.8.5:compile<br />|&#160;&#160;|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.8.5:compile<br />|&#160;&#160;|&#160; [...]
-<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-hdfs:jar:2.8.5:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-jobclient:jar:2.8.5:test<br />|&#160;&#160 [...]
-<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-client:jar:2.8.5:test<br />|&#160;&#160;|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-api:jar:2.8.5:test<br />|&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:test - omitted for duplicate)<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:ja [...]
-<li>org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
-<li>org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
-<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.8.5:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.8.5:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-m [...]
-<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-hdfs:jar:2.8.5:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-client:jar:2.8.5:compile<br />|&#160;&#160;&#160;&#160;&#1 [...]
-<li>org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.8.5:compile<br />|&#160;&#160;|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.8.5:compile<br />|&#160;&#160;|&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-yarn-api:jar:2.8.5:compile<br />|&#160;&#160;|&#160;&#160;&#160;&#160;&#160;|&#160;&#160; [...]
-<li>org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
-<li>org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.8.5:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.8.5:compile<br />|&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-yarn-api:jar:2.8.5:compile<br />|&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(common [...]
-<li>org.apache.hbase:hbase-hbtop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
-<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-minikdc:jar:2.8.5:test<br />|&#160;&#160;+-&#160;org.apache.directory.server:apacheds-core-api:jar:2.0.0-M15:test<br />|&#160;&#160;|&#160;&#160;+-&#160;(commons-lang:commons-lang:jar:2.6:test - omitted for duplicate)<br />|&#160;&#160;|&#160;&#160;+-&#160;org.apache.directory.api:api-ldap-codec-core:jar:1.0.0-M20:test<br />|&#160;&#160;|&#160;&#160;|&#160;&#160;\-&#160;(commons-lang:commons-lang:jar [...]
-<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-hdfs:jar:2.8.5:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-hdfs:test-jar:tests:2.8.5:test<br />|&#160;&#160;|&#160;&#16 [...]
-<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-client-jobclient:test-jar:tests:2.8.5:test<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.8.5:test<br />|&#160;&#160;|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-client:jar:2.8.5:test<br />|&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:test - omitted for duplicate)<br />|&#160;&#160;\-&#160;org.apache.hadoo [...]
-<li>org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
-<li>org.apache.hbase:hbase-metrics:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
-<li>org.apache.hbase:hbase-procedure:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
-<li>org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
-<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-hdfs:jar:2.8.5:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-jobclient:jar:2.8.5:test<br />|&#160;&#160;| [...]
-<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-minikdc:jar:2.8.5:test<br />|&#160;&#160;+-&#160;org.apache.directory.server:apacheds-core-api:jar:2.0.0-M15:test<br />|&#160;&#160;|&#160;&#160;+-&#160;(commons-lang:commons-lang:jar:2.6:test - omitted for duplicate)<br />|&#160;&#160;|&#160;&#160;+-&#160;org.apache.directory.api:api-ldap-codec-core:jar:1.0.0-M20:test<br />|&#160;&#160;|&#160;&#160;|&#160;&#160;\-&#160;(commons-lang:commons-lang:j [...]
-<li>org.apache.hbase:hbase-shaded-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160 [...]
-<li>org.apache.hbase:hbase-shaded-client-byo-hadoop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:provided (scope not updated to compile)<br />&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:provided<br /></li><br />
-<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-client:jar:2.8.5:test<br />|&#160;&#160;|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-api:jar:2.8.5:test<br />|&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:test - omitted for duplicate)<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client- [...]
-<li>org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
-<li>org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:provided (scope not updated to compile)<br />|&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:provided<br />+-&#160;org.apache.hadoop:hadoop-hdfs:jar:2.8.5:provided (scope not updated to compile)<br />|&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:provided - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.8.5:provide [...]
-<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br />\-&#160;org.apache.hbase:hbase-shaded-testing-util:jar:3 [...]
-<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.8.5:compile<br />|&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br />+-&#160;org.apache.hadoop:hadoop-hdfs:test-jar:tests:2.8.5:compile<br />|&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-client-app:test-jar:tests:2.8.5:compile<br />|&#160;&#160;+-&#160;org. [...]
-<li>org.apache.hbase:hbase-shaded-with-hadoop-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
-<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />|&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br />+-&#160;org.apache.hadoop:hadoop-client:jar:2.8.5:compile<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.8.5:compile<br />|&#160;&#160;|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.8.5:compile<br />|&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\ [...]
-<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />|&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br />+-&#160;org.apache.hadoop:hadoop-client:jar:2.8.5:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-api:jar:2.8.5:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-c [...]
-<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-hdfs:jar:2.8.5:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-hdfs:test-jar:tests:2.8.5:test<br />|&#160;&#160;&#160;&#16 [...]
-<li>org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br /></ol></td></tr></table></td></tr></table></div>
+<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;|&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />|&#160;&#160;|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-yarn-client:jar:2.10.0:compile<br />|&#160;&#160;|&#1 [...]
+<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-hdfs:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-jobclient:jar:2.10.0:test<br />|&#160;&#1 [...]
+<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-client:jar:2.10.0:test<br />|&#160;&#160;|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-api:jar:2.10.0:test<br />|&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:test - omitted for duplicate)<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core: [...]
+<li>org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
+<li>org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
+<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-yarn-client:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />|&#160;&#160;&#160;&#160;&#160;\-&#16 [...]
+<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-hdfs:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-client:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;& [...]
+<li>org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />|&#160;&#160;|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-yarn-client:jar:2.10.0:compile<br />|&#160;&#160;|&#160;&#160;|&#160;&#160;+-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />|&#160;&#160;|&#160;&#160;|&#160;&#160;\-&#1 [...]
+<li>org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
+<li>org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-yarn-client:jar:2.10.0:compile<br />|&#160;&#160;|&#160;&#160;+-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />|&#160;&#160;|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-api:jar:2.10.0:compile<br />|&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang: [...]
+<li>org.apache.hbase:hbase-hbtop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
+<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-minikdc:jar:2.10.0:test<br />|&#160;&#160;+-&#160;org.apache.directory.server:apacheds-core-api:jar:2.0.0-M15:test<br />|&#160;&#160;|&#160;&#160;+-&#160;(commons-lang:commons-lang:jar:2.6:test - omitted for duplicate)<br />|&#160;&#160;|&#160;&#160;+-&#160;org.apache.directory.api:api-ldap-codec-core:jar:1.0.0-M20:test<br />|&#160;&#160;|&#160;&#160;|&#160;&#160;\-&#160;(commons-lang:commons-lang:ja [...]
+<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-hdfs:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-hdfs:test-jar:tests:2.10.0:test<br />|&#160;&#160;|&#160;&# [...]
+<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-client-jobclient:test-jar:tests:2.10.0:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-mapreduce-client-shuffle:jar:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-nodemanager:jar:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:test - omitted for duplicate)<br />+-&#160;org [...]
+<li>org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
+<li>org.apache.hbase:hbase-metrics:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
+<li>org.apache.hbase:hbase-procedure:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
+<li>org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
+<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-hdfs:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-jobclient:jar:2.10.0:test<br />|&#160;&#160 [...]
+<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-minikdc:jar:2.10.0:test<br />|&#160;&#160;+-&#160;org.apache.directory.server:apacheds-core-api:jar:2.0.0-M15:test<br />|&#160;&#160;|&#160;&#160;+-&#160;(commons-lang:commons-lang:jar:2.6:test - omitted for duplicate)<br />|&#160;&#160;|&#160;&#160;+-&#160;org.apache.directory.api:api-ldap-codec-core:jar:1.0.0-M20:test<br />|&#160;&#160;|&#160;&#160;|&#160;&#160;\-&#160;(commons-lang:commons-lang: [...]
+<li>org.apache.hbase:hbase-shaded-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#16 [...]
+<li>org.apache.hbase:hbase-shaded-client-byo-hadoop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:provided (scope not updated to compile)<br />&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:provided<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-client:jar:2.10.0:test<br />|&#160;&#160;|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-api:jar:2.10.0:test<br />|&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:test - omitted for duplicate)<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-clien [...]
+<li>org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:provided (scope not updated to compile)<br />|&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:provided<br />+-&#160;org.apache.hadoop:hadoop-hdfs:jar:2.10.0:provided (scope not updated to compile)<br />|&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:provided - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:prov [...]
+<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br />\-&#160;org.apache.hbase:hbase-shaded-testing-util:jar: [...]
+<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br />+-&#160;org.apache.hadoop:hadoop-hdfs:test-jar:tests:2.10.0:compile<br />|&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-client-app:test-jar:tests:2.10.0:compile<br />|&#160;&#160;+-&#160;o [...]
+<li>org.apache.hbase:hbase-shaded-with-hadoop-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br />
+<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br />+-&#160;org.apache.hadoop:hadoop-client:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-api:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-clien [...]
+<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br />+-&#160;org.apache.hadoop:hadoop-client:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-api:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-mapreduc [...]
+<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-hdfs:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(commons-lang:commons-lang:jar:2.6:compile - omitted for duplicate)<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-hdfs:test-jar:tests:2.10.0:test<br />|&#160;&#160;&#160;&# [...]
+<li>org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;commons-lang:commons-lang:jar:2.6:compile<br /></li><br /></ol></td></tr></table></td></tr></table></div>
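
(Annotation: the commons-lang trees above illustrate Maven's nearest-wins mediation: the 2.6 copy declared closest to each project root, via hadoop-common, is kept, while the 2.4 copy reached transitively through commons-configuration is omitted. To confirm which copy actually resolved on a runtime classpath, one can read the winning jar's manifest metadata. A minimal, illustrative Java sketch follows; the class name CommonsLangProbe is ours, and the printed values depend on the jars actually present.)

    import org.apache.commons.lang.StringUtils;

    public class CommonsLangProbe {
        public static void main(String[] args) {
            // Implementation-Version is read from the MANIFEST.MF of whichever jar won mediation.
            System.out.println("commons-lang version: "
                    + StringUtils.class.getPackage().getImplementationVersion());
            // CodeSource reveals the jar that supplied the class (may print null for bootstrap loaders
            // or jars whose manifest lacks the attribute).
            System.out.println("loaded from: "
                    + StringUtils.class.getProtectionDomain().getCodeSource().getLocation());
        }
    }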
+<div class="section">
+<h4><a name="io.netty:netty"></a>io.netty:netty</h4>
+<table border="0" class="table table-striped">
+<tr class="b">
+<td><img alt="[Error]" src="images/icon_error_sml.gif" /></td>
+<td>
+<table border="0" class="table table-striped">
+<tr class="a">
+<td width="25%">3.10.6.Final</td>
+<td>
+<ol style="list-style-type: decimal">
+<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-mapreduce-client-jobclient:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.10.0:test<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;(io.netty:netty:jar:3.10.6.Final:test - omitted for conflict with 3.6.2.Final)<br />&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-shuffle:jar:2.10.0:test<br />&#160;&#160;&#160;|&#160;&#160 [...]
+<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-mapreduce-client-jobclient:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.10.0:test<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;(io.netty:netty:jar:3.10.6.Final:test - omitted for duplicate)<br />&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-shuffle:jar:2.10.0:test<br />&#160;&#160;&#160;|&#160;&#160;\-&#160; [...]
+<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-mapreduce-client-jobclient:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.10.0:test<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;(io.netty:netty:jar:3.10.6.Final:test - omitted for duplicate)<br />&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-shuffle:jar:2.10.0:test<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;(io [...]
+<tr class="b">
+<td width="25%">3.6.2.Final</td>
+<td>
+<ol style="list-style-type: decimal">
+<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;(io.netty:netty:jar:3.6.2.Final:compile - omitted for duplicate)<br />\-&#160;io.netty:netty:jar:3.6.2.Final:test (scope not updated to compile)<br /></li><br />
+<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;io.netty:netty:jar:3.6.2.Final:test<br /></li><br />
+<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />\-&#160;io.netty:netty:jar:3.6.2.Final:test<br /></li><br /></ol></td></tr></table></td></tr></table></div>
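
(Annotation: both Netty versions flagged above, 3.6.2.Final and 3.10.6.Final, ship the same org.jboss.netty packages, so whichever jar the resolver keeps silently shadows the other at runtime. A hedged sketch for locating the winning jar; org.jboss.netty.buffer.ChannelBuffer is a Netty 3.x type, and the probe class name is ours.)

    public class NettyProbe {
        public static void main(String[] args) throws ClassNotFoundException {
            // Resolve a Netty 3.x class by name so this compiles even without Netty on the build path.
            Class<?> cb = Class.forName("org.jboss.netty.buffer.ChannelBuffer");
            // The CodeSource location is the jar that won dependency mediation.
            System.out.println("netty 3.x loaded from: "
                    + cb.getProtectionDomain().getCodeSource().getLocation());
        }
    }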
+<div class="section">
+<h4><a name="org.apache.commons:commons-compress"></a>org.apache.commons:commons-compress</h4>
+<table border="0" class="table table-striped">
+<tr class="a">
+<td><img alt="[Error]" src="images/icon_error_sml.gif" /></td>
+<td>
+<table border="0" class="table table-striped">
+<tr class="b">
+<td width="25%">1.19</td>
+<td>
+<ol style="list-style-type: decimal">
+<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;|&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.10.0:compile<br />| [...]
+<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:test - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:j [...]
+<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:test<br />|&#160;&#160;|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.10.0:test<br />|&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:test - omitted for duplicate)<br />|&#160;&#160;\-&#160;org.apache.hadoop: [...]
+<li>org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br /></li><br />
+<li>org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br /></li><br />
+<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:compile - omitted for duplicate)<br />+-&#160;org.apache.ha [...]
+<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:test - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-mapred [...]
+<li>org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:compile - omitted for duplicate)<br />\-&#160;org.apache [...]
+<li>org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br /></li><br />
+<li>org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:compile - omitted for duplicate)<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile [...]
+<li>org.apache.hbase:hbase-hbtop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br /></li><br />
+<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />&#160;&#160;&#160;|&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:test - omitted for duplicate)<br />&#160;&#160;&#160 [...]
+<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:test - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-cl [...]
+<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:compile - omitted for duplicate)<br />\-&# [...]
+<li>org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br /></li><br />
+<li>org.apache.hbase:hbase-metrics:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br /></li><br />
+<li>org.apache.hbase:hbase-procedure:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br /></li><br />
+<li>org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br /></li><br />
+<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:test<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:test - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-mapreduce- [...]
+<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:compile - omitted for duplicate)<br />\-&#160 [...]
+<li>org.apache.hbase:hbase-shaded-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;|&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br />&#160;&#160;&#160;&#160;&#160;& [...]
+<li>org.apache.hbase:hbase-shaded-client-byo-hadoop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:provided (scope not updated to compile)<br />&#160;&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:provided<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />|&#160;&#160;+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:test<br />|&#160;&#160;|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.10.0:test<br />|&#160;&#160;|&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:test - omitted for duplicate)<br />|&#160;&#160;\-&#160;org.apache. [...]
+<li>org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:provided (scope not updated to compile)<br />|&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:provided<br />\-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:provided (scope not updated to compile)<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.10.0:provided<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org. [...]
+<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br />\-&#160;org.apache.hbase:hbase-shaded-testin [...]
+<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-client-app:test-jar:tests:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.10 [...]
+<li>org.apache.hbase:hbase-shaded-with-hadoop-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br /></li><br />
+<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:compile - omitted for duplicate)<br />\-&#160; [...]
+<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:compile - omitted for duplicate)<br />\ [...]
+<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.19:compile - omitted for duplicate)<br />+-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br />\-&#160 [...]
+<li>org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.commons:commons-compress:jar:1.19:compile<br /></li><br /></ol></td></tr>
+<tr class="a">
+<td width="25%">1.4.1</td>
+<td>
+<ol style="list-style-type: decimal">
+<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#16 [...]
+<li>org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-hbtop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#16 [...]
+<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-metrics:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-procedure:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#16 [...]
+<li>org.apache.hbase:hbase-shaded-client-byo-hadoop:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:provided (scope not updated to compile)<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:provided<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:provided - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160; [...]
+<li>org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:provided (scope not updated to compile)<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:provided<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:provided - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;& [...]
+<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:test-jar:tests:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-shaded-with-hadoop-check-invariants:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#16 [...]
+<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-mapreduce-client-core:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br />
+<li>org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />&#160;&#160;&#160;\-&#160;org.apache.avro:avro:jar:1.7.7:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.apache.commons:commons-compress:jar:1.4.1:compile - omitted for conflict with 1.19)<br /></li><br /></ol></td></tr></table></td></tr></table></div>
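
(Annotation: the spread between commons-compress 1.4.1, wanted by avro 1.7.7, and the mediated 1.19 is wide enough that the two are not API-compatible; probing for a method added in between shows which era of the library is actually on the classpath. A sketch under the assumption that CompressorStreamFactory.detect(InputStream) post-dates 1.4.1 — to our knowledge it appeared around the 1.14 timeframe; the probe class name is ours.)

    import java.io.InputStream;
    import org.apache.commons.compress.compressors.CompressorStreamFactory;

    public class CompressProbe {
        public static void main(String[] args) {
            try {
                // detect(InputStream) is absent from 1.4.1-era jars, present in recent ones.
                CompressorStreamFactory.class.getMethod("detect", InputStream.class);
                System.out.println("post-1.4.1 commons-compress resolved");
            } catch (NoSuchMethodException e) {
                System.out.println("1.4.1-era commons-compress resolved");
            }
        }
    }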
 <div class="section">
-<h4><a name="org.javassist:javassist"></a>org.javassist:javassist</h4>
+<h4><a name="org.codehaus.woodstox:stax2-api"></a>org.codehaus.woodstox:stax2-api</h4>
 <table border="0" class="table table-striped">
 <tr class="b">
 <td><img alt="[Error]" src="images/icon_error_sml.gif" /></td>
 <td>
 <table border="0" class="table table-striped">
 <tr class="a">
-<td width="25%">3.18.1-GA</td>
+<td width="25%">3.1.4</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:test<br />&#160;&#160;&#160;&#160 [...]
-<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;& [...]
-<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:test<br />&#160;&#160;&#160 [...]
-<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.curator:curator-test:jar:2.7.1:test<br />&#160;&#160;&#160;&#160;&#160;&#160; [...]
-<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:test<br />&#160;&#160;&#160;&#160 [...]
-<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.curator:curator-test:jar:2.7.1:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#16 [...]
-<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160 [...]
-<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.curator:curator-test:jar:2.7.1:test<br />&#160;&#160;&#160;&#160;&#160;&#160 [...]
-<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#1 [...]
-<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.curator:curator-test:jar:2.7.1:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:test<br />&#160;&#16 [...]
-<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160; [...]
-<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:compile<br /> [...]
-<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.curator:curator-test:jar:2.7.1:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#1 [...]
-<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.curator:curator-test:jar:2.7.1:compile<br />&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hadoop:hadoop-minicluster:jar:2.8.5:test<br />&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.8.5:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.curator:curator-test:jar:2.7.1:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&# [...]
+<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />+-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />|&#160;&#160;\-&#160;org.apache.hadoop:hadoop-common:jar:2.10.0:compile<br />|&#160;&#160;&#160;&#160;&#160;+-&#160;org.codehaus.woodstox:stax2-api:jar:3.1.4:compile<br />|&#160;&#160;&#160;&#160;&#160;\-&#160;com.fasterxml.woodstox:woodstox-core:jar:5.0.3:compile<br />|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.codehaus.woodstox:stax2-api:ja [...]
 <tr class="b">
-<td width="25%">3.20.0-GA</td>
+<td width="25%">4.1</td>
 <td>
 <ol style="list-style-type: decimal">
-<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.glassfish.jersey.core:jersey-server:jar:2.25.1:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.javassist:javassist:jar:3.20.0-GA:compile<br /></li><br />
-<li>org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.jersey.core:jersey-server:jar:2.25.1:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#1 [...]
-<li>org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.jersey.core:jersey-server:jar:2.25.1:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.jersey.core:jersey-server:jar:2.25.1:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;& [...]
-<li>org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.jersey.core:jersey-server:jar:2.25.1:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;& [...]
-<li>org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT<br />\-&#160;org.glassfish.jersey.core:jersey-server:jar:2.25.1:compile<br />&#160;&#160;&#160;\-&#160;org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.javassist:javassist:jar:3.20.0-GA:compile<br /></li><br />
-<li>org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT<br />\-&#160;org.glassfish.jersey.core:jersey-client:jar:2.25.1:compile<br />&#160;&#160;&#160;\-&#160;org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.javassist:javassist:jar:3.20.0-GA:compile<br /></li><br />
-<li>org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.jersey.core:jersey-server:jar:2.25.1:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160; [...]
-<li>org.apache.hbase:hbase-rest:jar:3.0.0-SNAPSHOT<br />\-&#160;org.glassfish.jersey.containers:jersey-container-servlet-core:jar:2.25.1:compile<br />&#160;&#160;&#160;\-&#160;org.glassfish.jersey.core:jersey-common:jar:2.25.1:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.javassist:javassist:jar:3.20.0-GA:compile<br /></li><br />
-<li>org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.glassfish.jersey.core:jersey-server:jar:2.25.1:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.javassist:javassist:jar:3.20.0-GA:compile<br /></li><br />
-<li>org.apache.hbase:hbase-shaded-client-project:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.jersey.core:jersey-server:jar:2.25.1:test<br />&#160;&#160;&#160;&#160;&#160;&#160;& [...]
-<li>org.apache.hbase:hbase-shaded-testing-util-tester:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT:test<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.jersey.core:jersey-server:jar:2.25.1:test<br />&#160;&#160;&# [...]
-<li>org.apache.hbase:hbase-shaded-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.jersey.core:jersey-server:jar:2.25.1:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:compile<br />&#160;&#160;&#160;&#160;&#1 [...]
-<li>org.apache.hbase:hbase-shell:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.jersey.core:jersey-server:jar:2.25.1:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#16 [...]
-<li>org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.jersey.core:jersey-server:jar:2.25.1:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#1 [...]
-<li>org.apache.hbase:hbase-thrift:jar:3.0.0-SNAPSHOT<br />\-&#160;org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;\-&#160;org.apache.hbase:hbase-http:jar:3.0.0-SNAPSHOT:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.jersey.core:jersey-server:jar:2.25.1:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;org.glassfish.hk2:hk2-locator:jar:2.5.0-b32:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#1 [...]
+<li>org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT<br />\-&#160;com.sun.xml.ws:jaxws-ri:pom:2.3.2:compile<br />&#160;&#160;&#160;\-&#160;com.sun.xml.ws:jaxws-rt:jar:2.3.2:compile<br />&#160;&#160;&#160;&#160;&#160;&#160;\-&#160;(org.codehaus.woodstox:stax2-api:jar:4.1:runtime - omitted for conflict with 3.1.4)<br /></li><br /></ol></td></tr></table></td></tr></table></div></div>
         </div>
     </div>
     <hr/>
@@ -550,7 +648,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-info.html b/dependency-info.html
index cada09c..ec33f0c 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -194,7 +194,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-management.html b/dependency-management.html
index 27ff12d..6614d97 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -355,63 +355,63 @@
 <tr class="a">
 <td>org.apache.hadoop</td>
 <td>hadoop-annotations</td>
-<td>2.8.5</td>
+<td>2.10.0</td>
 <td>jar</td>
-<td><a class="externalLink" href="http://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
+<td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
 <tr class="b">
 <td>org.apache.hadoop</td>
 <td>hadoop-auth</td>
-<td>2.8.5</td>
+<td>2.10.0</td>
 <td>jar</td>
-<td><a class="externalLink" href="http://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
+<td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
 <tr class="a">
 <td>org.apache.hadoop</td>
 <td>hadoop-client</td>
-<td>2.8.5</td>
+<td>2.10.0</td>
 <td>jar</td>
-<td><a class="externalLink" href="http://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
+<td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
 <tr class="b">
 <td>org.apache.hadoop</td>
 <td>hadoop-common</td>
-<td>2.8.5</td>
+<td>2.10.0</td>
 <td>jar</td>
-<td><a class="externalLink" href="http://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
+<td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
 <tr class="a">
 <td>org.apache.hadoop</td>
 <td>hadoop-distcp</td>
-<td>2.8.5</td>
+<td>2.10.0</td>
 <td>jar</td>
-<td><a class="externalLink" href="http://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
+<td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
 <tr class="b">
 <td>org.apache.hadoop</td>
 <td>hadoop-hdfs</td>
-<td>2.8.5</td>
+<td>2.10.0</td>
 <td>jar</td>
-<td><a class="externalLink" href="http://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
+<td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
 <tr class="a">
 <td>org.apache.hadoop</td>
 <td>hadoop-hdfs-client</td>
-<td>2.8.5</td>
+<td>2.10.0</td>
 <td>jar</td>
-<td><a class="externalLink" href="http://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
+<td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
 <tr class="b">
 <td>org.apache.hadoop</td>
 <td>hadoop-mapreduce-client-core</td>
-<td>2.8.5</td>
+<td>2.10.0</td>
 <td>jar</td>
-<td><a class="externalLink" href="http://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
+<td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
 <tr class="a">
 <td>org.apache.hadoop</td>
 <td>hadoop-mapreduce-client-jobclient</td>
-<td>2.8.5</td>
+<td>2.10.0</td>
 <td>jar</td>
-<td><a class="externalLink" href="http://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
+<td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
 <tr class="b">
 <td>org.apache.hadoop</td>
 <td>hadoop-minicluster</td>
-<td>2.8.5</td>
+<td>2.10.0</td>
 <td>jar</td>
-<td><a class="externalLink" href="http://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
+<td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
 <tr class="a">
 <td>org.apache.hbase</td>
 <td><a class="externalLink" href="https://hbase.apache.org/hbase-annotations">hbase-annotations</a></td>
@@ -795,126 +795,132 @@
 <th>Type</th>
 <th>License</th></tr>
 <tr class="b">
+<td>javax.activation</td>
+<td><a class="externalLink" href="http://java.net/all/javax.activation-api/">javax.activation-api</a></td>
+<td>1.2.0</td>
+<td>jar</td>
+<td><a class="externalLink" href="https://github.com/javaee/activation/blob/master/LICENSE.txt">CDDL/GPLv2+CE</a></td></tr>
+<tr class="a">
 <td>org.apache.hadoop</td>
 <td>hadoop-common</td>
-<td>2.8.5</td>
+<td>2.10.0</td>
 <td>test-jar</td>
-<td><a class="externalLink" href="http://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="a">
+<td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
+<tr class="b">
 <td>org.apache.hadoop</td>
 <td>hadoop-hdfs</td>
-<td>2.8.5</td>
+<td>2.10.0</td>
 <td>test-jar</td>
-<td><a class="externalLink" href="http://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="b">
+<td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
+<tr class="a">
 <td>org.apache.hadoop</td>
 <td>hadoop-mapreduce-client-jobclient</td>
-<td>2.8.5</td>
+<td>2.10.0</td>
 <td>test-jar</td>
-<td><a class="externalLink" href="http://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="a">
+<td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
+<tr class="b">
 <td>org.apache.hadoop</td>
 <td>hadoop-minikdc</td>
-<td>2.8.5</td>
+<td>2.10.0</td>
 <td>jar</td>
-<td><a class="externalLink" href="http://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="b">
+<td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
+<tr class="a">
 <td>org.apache.hbase</td>
 <td><a class="externalLink" href="https://hbase.apache.org/hbase-build-configuration/hbase-common">hbase-common</a></td>
 <td>3.0.0-SNAPSHOT</td>
 <td>test-jar</td>
 <td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="a">
+<tr class="b">
 <td>org.apache.hbase</td>
 <td><a class="externalLink" href="https://hbase.apache.org/hbase-build-configuration/hbase-hadoop-compat">hbase-hadoop-compat</a></td>
 <td>3.0.0-SNAPSHOT</td>
 <td>test-jar</td>
 <td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="b">
+<tr class="a">
 <td>org.apache.hbase</td>
 <td><a class="externalLink" href="https://hbase.apache.org/hbase-build-configuration/hbase-hadoop2-compat">hbase-hadoop2-compat</a></td>
 <td>3.0.0-SNAPSHOT</td>
 <td>test-jar</td>
 <td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="a">
+<tr class="b">
 <td>org.apache.hbase</td>
 <td><a class="externalLink" href="https://hbase.apache.org/hbase-build-configuration/hbase-http">hbase-http</a></td>
 <td>3.0.0-SNAPSHOT</td>
 <td>test-jar</td>
 <td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="b">
+<tr class="a">
 <td>org.apache.hbase</td>
 <td><a class="externalLink" href="https://hbase.apache.org/hbase-build-configuration/hbase-it">hbase-it</a></td>
 <td>3.0.0-SNAPSHOT</td>
 <td>test-jar</td>
 <td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="a">
+<tr class="b">
 <td>org.apache.hbase</td>
 <td><a class="externalLink" href="https://hbase.apache.org/hbase-build-configuration/hbase-mapreduce">hbase-mapreduce</a></td>
 <td>3.0.0-SNAPSHOT</td>
 <td>test-jar</td>
 <td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="b">
+<tr class="a">
 <td>org.apache.hbase</td>
 <td><a class="externalLink" href="https://hbase.apache.org/hbase-build-configuration/hbase-metrics">hbase-metrics</a></td>
 <td>3.0.0-SNAPSHOT</td>
 <td>test-jar</td>
 <td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="a">
+<tr class="b">
 <td>org.apache.hbase</td>
 <td><a class="externalLink" href="https://hbase.apache.org/hbase-build-configuration/hbase-metrics-api">hbase-metrics-api</a></td>
 <td>3.0.0-SNAPSHOT</td>
 <td>test-jar</td>
 <td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="b">
+<tr class="a">
 <td>org.apache.hbase</td>
 <td><a class="externalLink" href="https://hbase.apache.org/hbase-build-configuration/hbase-server">hbase-server</a></td>
 <td>3.0.0-SNAPSHOT</td>
 <td>test-jar</td>
 <td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="a">
+<tr class="b">
 <td>org.apache.hbase</td>
 <td><a class="externalLink" href="https://hbase.apache.org/hbase-build-configuration/hbase-shell">hbase-shell</a></td>
 <td>3.0.0-SNAPSHOT</td>
 <td>test-jar</td>
 <td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="b">
+<tr class="a">
 <td>org.apache.hbase</td>
 <td><a class="externalLink" href="https://hbase.apache.org/hbase-build-configuration/hbase-testing-util">hbase-testing-util</a></td>
 <td>3.0.0-SNAPSHOT</td>
 <td>jar</td>
 <td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="a">
+<tr class="b">
 <td>org.apache.hbase</td>
 <td><a class="externalLink" href="https://hbase.apache.org/hbase-build-configuration/hbase-thrift">hbase-thrift</a></td>
 <td>3.0.0-SNAPSHOT</td>
 <td>test-jar</td>
 <td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="b">
+<tr class="a">
 <td>org.apache.hbase</td>
 <td><a class="externalLink" href="https://hbase.apache.org/hbase-build-configuration/hbase-zookeeper">hbase-zookeeper</a></td>
 <td>3.0.0-SNAPSHOT</td>
 <td>test-jar</td>
 <td><a class="externalLink" href="https://www.apache.org/licenses/LICENSE-2.0.txt">Apache License, Version 2.0</a></td></tr>
-<tr class="a">
+<tr class="b">
 <td>org.bouncycastle</td>
 <td><a class="externalLink" href="http://www.bouncycastle.org/java.html">bcprov-jdk15on</a></td>
 <td>1.60</td>
 <td>jar</td>
 <td><a class="externalLink" href="http://www.bouncycastle.org/licence.html">Bouncy Castle Licence</a></td></tr>
-<tr class="b">
+<tr class="a">
 <td>org.hamcrest</td>
 <td><a class="externalLink" href="https://github.com/hamcrest/JavaHamcrest/hamcrest-core">hamcrest-core</a></td>
 <td>1.3</td>
 <td>jar</td>
 <td><a class="externalLink" href="http://www.opensource.org/licenses/bsd-license.php">New BSD License</a></td></tr>
-<tr class="a">
+<tr class="b">
 <td>org.hamcrest</td>
 <td><a class="externalLink" href="https://github.com/hamcrest/JavaHamcrest/hamcrest-library">hamcrest-library</a></td>
 <td>1.3</td>
 <td>jar</td>
 <td><a class="externalLink" href="http://www.opensource.org/licenses/bsd-license.php">New BSD License</a></td></tr>
-<tr class="b">
+<tr class="a">
 <td>org.mockito</td>
 <td><a class="externalLink" href="https://github.com/mockito/mockito">mockito-core</a></td>
 <td>2.28.2</td>
@@ -928,7 +934,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index ac79574..0bd0990 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -4165,14 +4165,14 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#date">date</a></code></td>
-<td class="colLast"><code>"Sun Mar 15 15:45:24 UTC 2020"</code></td>
+<td class="colLast"><code>"Tue Mar 17 14:48:11 UTC 2020"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.revision">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#revision">revision</a></code></td>
-<td class="colLast"><code>"f66cbe1a40cd9d4cd4abd27c7ac9d1a5168b885f"</code></td>
+<td class="colLast"><code>"60de4c2e31fee195311a2b99846e8d69af5b2985"</code></td>
 </tr>
 <tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.srcChecksum">
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 7f01a05..d01028d 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -20461,6 +20461,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createFileCreator3--">createFileCreator3()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createFileCreator3_3--">createFileCreator3_3()</a></span> - Static method in class org.apache.hadoop.hbase.io.asyncfs.<a href="org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper</a></dt>
+<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/compactions/Compactor.html#createFileScanners-java.util.Collection-long-boolean-">createFileScanners(Collection&lt;HStoreFile&gt;, long, boolean)</a></span> - Method in class org.apache.hadoop.hbase.regionserver.compactions.<a href="org/apache/hadoop/hbase/regionserver/compactions/Compactor.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">Compactor</a></dt>
 <dd>
 <div class="block">Creates file scanners for compaction.</div>
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 2f3a624..4e23761 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -167,10 +167,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupRestoreConstants.BackupCommand</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index e29d2c4..cffbd1e 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -433,21 +433,21 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html b/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
index 06e569c..675c699 100644
--- a/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
@@ -201,8 +201,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">RegionObserver.MutationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.MetaTableOps.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">MetaTableMetrics.MetaTableOps</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">RegionObserver.MutationType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html b/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
index 0192935..880a54a 100644
--- a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
@@ -104,8 +104,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/ExecutorType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">ExecutorType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/EventType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">EventType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/ExecutorType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">ExecutorType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 85461b4..fbd42a3 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -191,12 +191,12 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.html b/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.html
index bcc1adb..1f146f1 100644
--- a/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.html
+++ b/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.html
@@ -171,7 +171,7 @@ extends org.apache.hadoop.fs.FilterFileSystem</pre>
 <!--   -->
 </a>
 <h3>Nested classes/interfaces inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>org.apache.hadoop.fs.FileSystem.Statistics</code></li>
+<code>org.apache.hadoop.fs.FileSystem.DirectoryEntries, org.apache.hadoop.fs.FileSystem.Statistics</code></li>
 </ul>
 </li>
 </ul>
@@ -216,7 +216,7 @@ extends org.apache.hadoop.fs.FilterFileSystem</pre>
 <!--   -->
 </a>
 <h3>Fields inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX</code></li>
+<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX, USER_HOME_PREFIX</code></li>
 </ul>
 </li>
 </ul>
@@ -365,14 +365,14 @@ extends org.apache.hadoop.fs.FilterFileSystem</pre>
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.fs.FilterFileSystem</h3>
-<code>access, append, canonicalizeUri, checkPath, completeLocalOutput, concat, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, create, create, createNonRecursive, createSnapshot, createSymlink, delete, deleteSnapshot, getAclStatus, getAllStoragePolicies, getCanonicalUri, getChildFileSystems, getConf, getDefaultBlockSize, getDefaultBlockSize, getDefaultReplication, getDefaultReplication, getFileBlockLocations, getFileChecksum, getFileChecksum, getFileLinkStatus,  [...]
+<code>access, append, appendFile, canonicalizeUri, checkPath, completeLocalOutput, concat, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, create, create, createFile, createNonRecursive, createSnapshot, createSymlink, delete, deleteSnapshot, getAclStatus, getAllStoragePolicies, getCanonicalUri, getChildFileSystems, getConf, getDefaultBlockSize, getDefaultBlockSize, getDefaultReplication, getDefaultReplication, getFileBlockLocations, getFileChecksum, getFileCheck [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.fs.FileSystem">
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>addDelegationTokens, append, append, areSymlinksEnabled, cancelDeleteOnExit, clearStatistics, closeAll, closeAllForUGI, copyFromLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, create, createNewFile, createNonRecursive, createSnapshot, delete, deleteOnExit, enableSymlinks, exists, fixRelativePart, get, get, getAllStatistics, getBlockSize, getCanonicalServiceName, getContentSummary, getDefaultPort, getDefaul [...]
+<code>addDelegationTokens, append, append, areSymlinksEnabled, cancelDeleteOnExit, clearStatistics, closeAll, closeAllForUGI, copyFromLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, create, createNewFile, createNonRecursive, createSnapshot, delete, deleteOnExit, enableSymlinks, exists, fixRelativePart, get, get, getAllStatistics, getBlockSize, getCanonicalServiceName, getContentSummary, getDefaultPort, getDefaul [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.conf.Configured">
diff --git a/devapidocs/org/apache/hadoop/hbase/hbtop/field/package-tree.html b/devapidocs/org/apache/hadoop/hbase/hbtop/field/package-tree.html
index d9bf09d..cd793c2 100644
--- a/devapidocs/org/apache/hadoop/hbase/hbtop/field/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/hbtop/field/package-tree.html
@@ -92,8 +92,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.hbtop.field.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/field/FieldValueType.html" title="enum in org.apache.hadoop.hbase.hbtop.field"><span class="typeNameLink">FieldValueType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.hbtop.field.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/field/Field.html" title="enum in org.apache.hadoop.hbase.hbtop.field"><span class="typeNameLink">Field</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.hbtop.field.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/field/FieldValueType.html" title="enum in org.apache.hadoop.hbase.hbtop.field"><span class="typeNameLink">FieldValueType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html
index 007e90f..b8f3866 100644
--- a/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html
@@ -107,8 +107,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.hbtop.terminal.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/terminal/KeyPress.Type.html" title="enum in org.apache.hadoop.hbase.hbtop.terminal"><span class="typeNameLink">KeyPress.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.hbtop.terminal.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/terminal/Color.html" title="enum in org.apache.hadoop.hbase.hbtop.terminal"><span class="typeNameLink">Color</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.hbtop.terminal.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/terminal/KeyPress.Type.html" title="enum in org.apache.hadoop.hbase.hbtop.terminal"><span class="typeNameLink">KeyPress.Type</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/http/package-tree.html b/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
index 54adced..2dbe190 100644
--- a/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
@@ -140,8 +140,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Output.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Output</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/HttpConfig.Policy.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">HttpConfig.Policy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Output.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Output</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Event.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Event</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
index 081364d..89fcff5 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.258">FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose</a>
+<pre>static final class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.276">FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html" title="interface in org.apache.hadoop.hbase.util">CancelableProgressable</a></pre>
 </li>
@@ -208,7 +208,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/util/CancelablePro
 <ul class="blockListLast">
 <li class="blockList">
 <h4>client</h4>
-<pre>private final&nbsp;org.apache.hadoop.hdfs.DFSClient <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html#line.260">client</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.hdfs.DFSClient <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html#line.278">client</a></pre>
 </li>
 </ul>
 </li>
@@ -225,7 +225,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/util/CancelablePro
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CancelOnClose</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html#line.262">CancelOnClose</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client)</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html#line.280">CancelOnClose</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client)</pre>
 </li>
 </ul>
 </li>
@@ -242,7 +242,7 @@ implements <a href="../../../../../../org/apache/hadoop/hbase/util/CancelablePro
 <ul class="blockListLast">
 <li class="blockList">
 <h4>progress</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html#line.267">progress</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html#line.285">progress</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/util/CancelableProgressable.html#progress--">CancelableProgressable</a></code></span></div>
 <div class="block">Report progress.  Returns true if operations should continue, false if the
  operation should be canceled and rolled back.</div>
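
[editor's note: the progress() contract documented above is the whole CancelableProgressable interface: long-running recovery loops poll it and abort when it returns false. Below is a minimal sketch of an implementation honoring that contract; the deadline check is illustrative only, since CancelOnClose itself gates on the wrapped DFSClient instead.]

import org.apache.hadoop.hbase.util.CancelableProgressable;

/** Illustrative only: keeps reporting progress until a wall-clock deadline passes. */
public class DeadlineProgressable implements CancelableProgressable {
  private final long deadlineNanos;

  public DeadlineProgressable(long timeoutMillis) {
    this.deadlineNanos = System.nanoTime() + timeoutMillis * 1_000_000L;
  }

  @Override
  public boolean progress() {
    // true: keep going; false: the caller should cancel and roll back, per the contract above.
    return System.nanoTime() < deadlineNanos;
  }
}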
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
index 94cb926..cee189b 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
@@ -126,7 +126,7 @@
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.462">FanOutOneBlockAsyncDFSOutputHelper.NameNodeException</a>
+<pre>public static class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.480">FanOutOneBlockAsyncDFSOutputHelper.NameNodeException</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Exception other than RemoteException thrown when calling create on namenode</div>
 <dl>
@@ -215,7 +215,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>serialVersionUID</h4>
-<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html#line.464">serialVersionUID</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html#line.482">serialVersionUID</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.serialVersionUID">Constant Field Values</a></dd>
@@ -236,7 +236,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>NameNodeException</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html#line.466">NameNodeException</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&nbsp;cause)</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html#line.484">NameNodeException</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&nbsp;cause)</pre>
 </li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html
index 3e79818..5ed4536 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html
@@ -18,7 +18,7 @@
     catch(err) {
     }
 //-->
-var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9};
+var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -287,13 +287,17 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </tr>
 <tr id="i7" class="rowColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createFileCreator3--">createFileCreator3</a></span>()</code>&nbsp;</td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createFileCreator3_3--">createFileCreator3_3</a></span>()</code>&nbsp;</td>
 </tr>
 <tr id="i8" class="altColor">
+<td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a></code></td>
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createFileCreator3--">createFileCreator3</a></span>()</code>&nbsp;</td>
+</tr>
+<tr id="i9" class="rowColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.LeaseManager</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createLeaseManager--">createLeaseManager</a></span>()</code>&nbsp;</td>
 </tr>
-<tr id="i9" class="rowColor">
+<tr id="i10" class="altColor">
 <td class="colFirst"><code>static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createOutput-org.apache.hadoop.hdfs.DistributedFileSystem-org.apache.hadoop.fs.Path-boolean-boolean-short-long-org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup-java.lang.Class-">createOutput</a></span>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs,
             org.apache.hadoop.fs.Path&nbsp;f,
@@ -306,7 +310,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <div class="block">Create a <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs"><code>FanOutOneBlockAsyncDFSOutput</code></a>.</div>
 </td>
 </tr>
-<tr id="i10" class="altColor">
+<tr id="i11" class="rowColor">
 <td class="colFirst"><code>private static <a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createOutput-org.apache.hadoop.hdfs.DistributedFileSystem-java.lang.String-boolean-boolean-short-long-org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup-java.lang.Class-">createOutput</a></span>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs,
             <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;src,
@@ -317,16 +321,16 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
             org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup&nbsp;eventLoopGroup,
             <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends org.apache.hbase.thirdparty.io.netty.channel.Channel&gt;&nbsp;channelClass)</code>&nbsp;</td>
 </tr>
-<tr id="i11" class="rowColor">
+<tr id="i12" class="altColor">
 <td class="colFirst"><code>(package private) static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#endFileLease-org.apache.hadoop.hdfs.DFSClient-long-">endFileLease</a></span>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
             long&nbsp;inodeId)</code>&nbsp;</td>
 </tr>
-<tr id="i12" class="altColor">
+<tr id="i13" class="rowColor">
 <td class="colFirst"><code>(package private) static org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#getStatus-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto-">getStatus</a></span>(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto&nbsp;ack)</code>&nbsp;</td>
 </tr>
-<tr id="i13" class="rowColor">
+<tr id="i14" class="altColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#initialize-org.apache.hadoop.conf.Configuration-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.hdfs.protocol.DatanodeInfo-org.apache.hadoop.fs.StorageType-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-int-org.apache.hadoop.hdfs.DFSClient-org.apache.hadoop.security.token.Tok [...]
           org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
@@ -338,24 +342,24 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
           org.apache.hadoop.security.token.Token&lt;org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier&gt;&nbsp;accessToken,
           org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise&lt;org.apache.hbase.thirdparty.io.netty.channel.Channel&gt;&nbsp;promise)</code>&nbsp;</td>
 </tr>
-<tr id="i14" class="altColor">
+<tr id="i15" class="rowColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#processWriteBlockResponse-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.hdfs.protocol.DatanodeInfo-org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise-int-">processWriteBlockResponse</a></span>(org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
                          org.apache.hadoop.hdfs.protocol.DatanodeInfo&nbsp;dnInfo,
                          org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise&lt;org.apache.hbase.thirdparty.io.netty.channel.Channel&gt;&nbsp;promise,
                          int&nbsp;timeoutMs)</code>&nbsp;</td>
 </tr>
-<tr id="i15" class="rowColor">
+<tr id="i16" class="altColor">
 <td class="colFirst"><code>private static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#requestWriteBlock-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.fs.StorageType-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder-">requestWriteBlock</a></span>(org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
                  org.apache.hadoop.fs.StorageType&nbsp;storageType,
                  org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder&nbsp;writeBlockProtoBuilder)</code>&nbsp;</td>
 </tr>
-<tr id="i16" class="altColor">
+<tr id="i17" class="rowColor">
 <td class="colFirst"><code>static boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#shouldRetryCreate-org.apache.hadoop.ipc.RemoteException-">shouldRetryCreate</a></span>(org.apache.hadoop.ipc.RemoteException&nbsp;e)</code>&nbsp;</td>
 </tr>
-<tr id="i17" class="rowColor">
+<tr id="i18" class="altColor">
 <td class="colFirst"><code>(package private) static void</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#sleepIgnoreInterrupt-int-">sleepIgnoreInterrupt</a></span>(int&nbsp;retry)</code>&nbsp;</td>
 </tr>
@@ -540,13 +544,27 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </dl>
 </li>
 </ul>
+<a name="createFileCreator3_3--">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>createFileCreator3_3</h4>
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.224">createFileCreator3_3</a>()
+                                                                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
+<dl>
+<dt><span class="throwsLabel">Throws:</span></dt>
+<dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></code></dd>
+</dl>
+</li>
+</ul>
 <a name="createFileCreator3--">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>createFileCreator3</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.224">createFileCreator3</a>()
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.236">createFileCreator3</a>()
                                                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -560,7 +578,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createFileCreator2</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.236">createFileCreator2</a>()
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.248">createFileCreator2</a>()
                                                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -574,7 +592,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createFileCreator</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.248">createFileCreator</a>()
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.260">createFileCreator</a>()
                                                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/NoSuchMethodException.html?is-external=true" title="class or interface in java.lang">NoSuchMethodException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -588,7 +606,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>beginFileLease</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.286">beginFileLease</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
+<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.304">beginFileLease</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
                            long&nbsp;inodeId)</pre>
 </li>
 </ul>
@@ -598,7 +616,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>endFileLease</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.290">endFileLease</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
+<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.308">endFileLease</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
                          long&nbsp;inodeId)</pre>
 </li>
 </ul>
@@ -608,7 +626,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createChecksum</h4>
-<pre>static&nbsp;org.apache.hadoop.util.DataChecksum&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.294">createChecksum</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client)</pre>
+<pre>static&nbsp;org.apache.hadoop.util.DataChecksum&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.312">createChecksum</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client)</pre>
 </li>
 </ul>
 <a name="getStatus-org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto-">
@@ -617,7 +635,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getStatus</h4>
-<pre>static&nbsp;org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.298">getStatus</a>(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto&nbsp;ack)</pre>
+<pre>static&nbsp;org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.316">getStatus</a>(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto&nbsp;ack)</pre>
 </li>
 </ul>
 <a name="processWriteBlockResponse-org.apache.hbase.thirdparty.io.netty.channel.Channel-org.apache.hadoop.hdfs.protocol.DatanodeInfo-org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise-int-">
@@ -626,7 +644,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>processWriteBlockResponse</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.310">processWriteBlockResponse</a>(org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.328">processWriteBlockResponse</a>(org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
                                               org.apache.hadoop.hdfs.protocol.DatanodeInfo&nbsp;dnInfo,
                                               org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise&lt;org.apache.hbase.thirdparty.io.netty.channel.Channel&gt;&nbsp;promise,
                                               int&nbsp;timeoutMs)</pre>
@@ -638,7 +656,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>requestWriteBlock</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.371">requestWriteBlock</a>(org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.389">requestWriteBlock</a>(org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
                                       org.apache.hadoop.fs.StorageType&nbsp;storageType,
                                       org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.Builder&nbsp;writeBlockProtoBuilder)
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -654,7 +672,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>initialize</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.384">initialize</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.402">initialize</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                org.apache.hbase.thirdparty.io.netty.channel.Channel&nbsp;channel,
                                org.apache.hadoop.hdfs.protocol.DatanodeInfo&nbsp;dnInfo,
                                org.apache.hadoop.fs.StorageType&nbsp;storageType,
@@ -676,7 +694,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>connectToDataNodes</h4>
-<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hbase.thirdparty.io.netty.util.concurrent.Future&lt;org.apache.hbase.thirdparty.io.netty.channel.Channel&gt;&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.405">connectToDataNodes</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>private static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hbase.thirdparty.io.netty.util.concurrent.Future&lt;org.apache.hbase.thirdparty.io.netty.channel.Channel&gt;&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.423">connectToDataNodes</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                                                                                                                           org.apache.hadoop.hdfs.DFSClient&nbsp;client,
                                                                                                                                                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;clientName,
                                                                                                                                                           org.apache.hadoop.hdfs.protocol.LocatedBlock&nbsp;locatedBlock,
@@ -694,7 +712,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createOutput</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.471">createOutput</a>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs,
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.489">createOutput</a>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs,
                                                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;src,
                                                          boolean&nbsp;overwrite,
                                                          boolean&nbsp;createParent,
@@ -715,7 +733,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createOutput</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.568">createOutput</a>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs,
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.html" title="class in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutput</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.586">createOutput</a>(org.apache.hadoop.hdfs.DistributedFileSystem&nbsp;dfs,
                                                         org.apache.hadoop.fs.Path&nbsp;f,
                                                         boolean&nbsp;overwrite,
                                                         boolean&nbsp;createParent,
@@ -738,7 +756,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldRetryCreate</h4>
-<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.587">shouldRetryCreate</a>(org.apache.hadoop.ipc.RemoteException&nbsp;e)</pre>
+<pre>public static&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.605">shouldRetryCreate</a>(org.apache.hadoop.ipc.RemoteException&nbsp;e)</pre>
 </li>
 </ul>
 <a name="completeFile-org.apache.hadoop.hdfs.DFSClient-org.apache.hadoop.hdfs.protocol.ClientProtocol-java.lang.String-java.lang.String-org.apache.hadoop.hdfs.protocol.ExtendedBlock-long-">
@@ -747,7 +765,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>completeFile</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.594">completeFile</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
+<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.612">completeFile</a>(org.apache.hadoop.hdfs.DFSClient&nbsp;client,
                          org.apache.hadoop.hdfs.protocol.ClientProtocol&nbsp;namenode,
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;src,
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;clientName,
@@ -761,7 +779,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>sleepIgnoreInterrupt</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.619">sleepIgnoreInterrupt</a>(int&nbsp;retry)</pre>
+<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#line.637">sleepIgnoreInterrupt</a>(int&nbsp;retry)</pre>
 </li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html
index b69d756..846e995 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/class-use/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html
@@ -125,6 +125,10 @@
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a></code></td>
+<td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createFileCreator3_3--">createFileCreator3_3</a></span>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>private static <a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html" title="interface in org.apache.hadoop.hbase.io.asyncfs">FanOutOneBlockAsyncDFSOutputHelper.FileCreator</a></code></td>
 <td class="colLast"><span class="typeNameLabel">FanOutOneBlockAsyncDFSOutputHelper.</span><code><span class="memberNameLink"><a href="../../../../../../../org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html#createFileCreator3--">createFileCreator3</a></span>()</code>&nbsp;</td>
 </tr>
 </tbody>
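
[editor's note: for orientation, the public entry point these FileCreator variants ultimately serve is FanOutOneBlockAsyncDFSOutputHelper.createOutput, whose signature appears in the summary above. Below is a minimal usage sketch, assuming a reachable HDFS cluster behind fs.defaultFS and the shaded netty classes HBase bundles; the path, replication, and block size are placeholder values.]

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutput;
import org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioSocketChannel;

public class AsyncFsOutputSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // createOutput requires a DistributedFileSystem, i.e. an actual HDFS filesystem.
    DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);
    EventLoopGroup group = new NioEventLoopGroup();
    try {
      FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(
          dfs, new Path("/tmp/asyncfs-sketch"), true /* overwrite */, false /* createParent */,
          (short) 3 /* replication */, 128L * 1024 * 1024 /* blockSize */,
          group, NioSocketChannel.class);
      out.close();
    } finally {
      group.shutdownGracefully();
      dfs.close();
    }
  }
}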
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/PersistentIOEngine.DuFileCommand.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/PersistentIOEngine.DuFileCommand.html
index 2b7695d..3be833e 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/PersistentIOEngine.DuFileCommand.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/PersistentIOEngine.DuFileCommand.html
@@ -219,14 +219,14 @@ extends org.apache.hadoop.util.Shell.ShellCommandExecutor</pre>
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.util.Shell.ShellCommandExecutor</h3>
-<code>close, execute, getOutput, parseExecResult, toString</code></li>
+<code>close, execute, getOutput, getTimeoutInterval, parseExecResult, toString</code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.util.Shell">
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.util.Shell</h3>
-<code>appendScriptExtension, appendScriptExtension, checkIsBashSupported, checkWindowsCommandLineLength, execCommand, execCommand, execCommand, getCheckProcessIsAliveCommand, getEnvironment, getEnvironmentVariableRegex, getExitCode, getGetPermissionCommand, getGroupsCommand, getGroupsForUserCommand, getGroupsIDForUserCommand, getHadoopHome, getProcess, getQualifiedBin, getQualifiedBinPath, getReadlinkCommand, getRunScriptCommand, getSetOwnerCommand, getSetPermissionCommand, getSetPermiss [...]
+<code>appendScriptExtension, appendScriptExtension, checkIsBashSupported, checkWindowsCommandLineLength, destroyAllShellProcesses, execCommand, execCommand, execCommand, getAllShells, getCheckProcessIsAliveCommand, getEnvironment, getEnvironmentVariableRegex, getExitCode, getGetPermissionCommand, getGroupsCommand, getGroupsForUserCommand, getGroupsIDForUserCommand, getHadoopHome, getMemlockLimit, getProcess, getQualifiedBin, getQualifiedBinPath, getReadlinkCommand, getRunScriptCommand, g [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index c2eadce..a8675aa 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -305,12 +305,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheFactory.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockCacheFactory.ExternalBlockCaches</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/ReaderContext.ReaderType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">ReaderContext.ReaderType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index 3c8df2d..b37ca81 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -359,8 +359,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallAction</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/CallEvent.Type.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">CallEvent.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallAction</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.SourceStorage.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">MetricsHBaseServerSourceFactoryImpl.SourceStorage</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index 73547d0..1684038 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -298,8 +298,8 @@
 <ul>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/CellCounter.CellCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">CellCounter.CellCounterMapper.Counters</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index f560d9a..5c7e1c4 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -356,12 +356,12 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/ServerManager.ServerLiveState.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">ServerManager.ServerLiveState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetaRegionLocationCache.ZNodeOpType.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetaRegionLocationCache.ZNodeOpType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/ServerManager.ServerLiveState.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">ServerManager.ServerLiveState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">RegionState.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetaRegionLocationCache.ZNodeOpType.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetaRegionLocationCache.ZNodeOpType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index 5695636..11325d3 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -222,10 +222,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MetaProcedureInterface.MetaOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">MetaProcedureInterface.MetaOperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">TableProcedureInterface.TableOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MetaProcedureInterface.MetaOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">MetaProcedureInterface.MetaOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index 4c20c81..23d2110 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -429,19 +429,19 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index ff124c8..4aef9f8 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -216,11 +216,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">StateMachineProcedure.Flow</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/Procedure.LockState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">Procedure.LockState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">StateMachineProcedure.Flow</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index 9a24193..3dd0da6 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -241,11 +241,11 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaScope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 5a7a6d0..e89d990 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -735,20 +735,20 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index 7f09712..cf0a47c 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -130,9 +130,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.MatchCode.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">ScanQueryMatcher.MatchCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.MatchCode.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">ScanQueryMatcher.MatchCode</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index 09e952d..2639a2c 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
@@ -247,10 +247,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.RollRequestReason.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">WALActionsListener.RollRequestReason</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.Type.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">RingBufferTruck.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/CompressionContext.DictionaryIndex.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">CompressionContext.DictionaryIndex</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrResult.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">ProtobufLogReader.WALHdrResult</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/CompressionContext.DictionaryIndex.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">CompressionContext.DictionaryIndex</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.RollRequestReason.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">WALActionsListener.RollRequestReason</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
index 795500d..b47c8cc 100644
--- a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
@@ -110,8 +110,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.FilterType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.FilterType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.FilterType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.FilterType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
index 30bb722..9ec1864 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
@@ -162,12 +162,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Scope.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Scope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessController.OpType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.HDFSAclOperation.AclType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">SnapshotScannerHDFSAclHelper.HDFSAclOperation.AclType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Scope.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Scope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.HDFSAclOperation.OperationType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">SnapshotScannerHDFSAclHelper.HDFSAclOperation.OperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Action.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Action</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.HDFSAclOperation.AclType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">SnapshotScannerHDFSAclHelper.HDFSAclOperation.AclType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
index c793825..c060011 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
@@ -189,9 +189,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">AuthMethod</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslUtil.QualityOfProtection.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslUtil.QualityOfProtection</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslStatus</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index a93f658..2d14cd5 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -559,15 +559,15 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLockWithObjectPool.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLockWithObjectPool.ReferenceType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/DNS.ServerType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">DNS.ServerType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HbckErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HbckErrorReporter.ERROR_CODE</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/DNS.ServerType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">DNS.ServerType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLockWithObjectPool.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLockWithObjectPool.ReferenceType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index c67e09b..08a68af 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -18,9 +18,9 @@
 <span class="sourceLineNo">010</span>  justification="Intentional; to be modified in test")<a name="line.10"></a>
 <span class="sourceLineNo">011</span>public class Version {<a name="line.11"></a>
 <span class="sourceLineNo">012</span>  public static final String version = new String("3.0.0-SNAPSHOT");<a name="line.12"></a>
-<span class="sourceLineNo">013</span>  public static final String revision = "f66cbe1a40cd9d4cd4abd27c7ac9d1a5168b885f";<a name="line.13"></a>
+<span class="sourceLineNo">013</span>  public static final String revision = "60de4c2e31fee195311a2b99846e8d69af5b2985";<a name="line.13"></a>
 <span class="sourceLineNo">014</span>  public static final String user = "jenkins";<a name="line.14"></a>
-<span class="sourceLineNo">015</span>  public static final String date = "Sun Mar 15 15:45:24 UTC 2020";<a name="line.15"></a>
+<span class="sourceLineNo">015</span>  public static final String date = "Tue Mar 17 14:48:11 UTC 2020";<a name="line.15"></a>
 <span class="sourceLineNo">016</span>  public static final String url = "git://jenkins-websites-he-de.apache.org/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";<a name="line.16"></a>
 <span class="sourceLineNo">017</span>  public static final String srcChecksum = "(stdin)=";<a name="line.17"></a>
 <span class="sourceLineNo">018</span>}<a name="line.18"></a>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
index 6684af5..3556576 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.CancelOnClose.html
@@ -229,408 +229,426 @@
 <span class="sourceLineNo">221</span>    };<a name="line.221"></a>
 <span class="sourceLineNo">222</span>  }<a name="line.222"></a>
 <span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.224"></a>
+<span class="sourceLineNo">224</span>  private static FileCreator createFileCreator3_3() throws NoSuchMethodException {<a name="line.224"></a>
 <span class="sourceLineNo">225</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.226"></a>
-<span class="sourceLineNo">227</span>      CryptoProtocolVersion[].class, String.class);<a name="line.227"></a>
+<span class="sourceLineNo">226</span>        String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        CryptoProtocolVersion[].class, String.class, String.class);<a name="line.227"></a>
 <span class="sourceLineNo">228</span><a name="line.228"></a>
 <span class="sourceLineNo">229</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.229"></a>
 <span class="sourceLineNo">230</span>        supportedVersions) -&gt; {<a name="line.230"></a>
 <span class="sourceLineNo">231</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.231"></a>
-<span class="sourceLineNo">232</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.232"></a>
+<span class="sourceLineNo">232</span>          createParent, replication, blockSize, supportedVersions, null, null);<a name="line.232"></a>
 <span class="sourceLineNo">233</span>    };<a name="line.233"></a>
 <span class="sourceLineNo">234</span>  }<a name="line.234"></a>
 <span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.236"></a>
+<span class="sourceLineNo">236</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.236"></a>
 <span class="sourceLineNo">237</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.237"></a>
 <span class="sourceLineNo">238</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      CryptoProtocolVersion[].class);<a name="line.239"></a>
+<span class="sourceLineNo">239</span>      CryptoProtocolVersion[].class, String.class);<a name="line.239"></a>
 <span class="sourceLineNo">240</span><a name="line.240"></a>
 <span class="sourceLineNo">241</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.241"></a>
 <span class="sourceLineNo">242</span>        supportedVersions) -&gt; {<a name="line.242"></a>
 <span class="sourceLineNo">243</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        createParent, replication, blockSize, supportedVersions);<a name="line.244"></a>
+<span class="sourceLineNo">244</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.244"></a>
 <span class="sourceLineNo">245</span>    };<a name="line.245"></a>
 <span class="sourceLineNo">246</span>  }<a name="line.246"></a>
 <span class="sourceLineNo">247</span><a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    try {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return createFileCreator3();<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    } catch (NoSuchMethodException e) {<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    }<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    return createFileCreator2();<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  }<a name="line.255"></a>
-<span class="sourceLineNo">256</span><a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // cancel the processing if DFSClient is already closed.<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.258"></a>
+<span class="sourceLineNo">248</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      CryptoProtocolVersion[].class);<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        supportedVersions) -&gt; {<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.255"></a>
+<span class="sourceLineNo">256</span>        createParent, replication, blockSize, supportedVersions);<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    };<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  }<a name="line.258"></a>
 <span class="sourceLineNo">259</span><a name="line.259"></a>
-<span class="sourceLineNo">260</span>    private final DFSClient client;<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    public CancelOnClose(DFSClient client) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      this.client = client;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
-<span class="sourceLineNo">265</span><a name="line.265"></a>
-<span class="sourceLineNo">266</span>    @Override<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    public boolean progress() {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  static {<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    try {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      LEASE_MANAGER = createLeaseManager();<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      FILE_CREATOR = createFileCreator();<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    } catch (Exception e) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.278"></a>
-<span class="sourceLineNo">279</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          "HBASE-16110 for more information.";<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      LOG.error(msg, e);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      throw new Error(msg, e);<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
-<span class="sourceLineNo">285</span><a name="line.285"></a>
-<span class="sourceLineNo">286</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.287"></a>
+<span class="sourceLineNo">260</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    try {<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      return createFileCreator3_3();<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    } catch (NoSuchMethodException e) {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 3.2 or below");<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    }<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>    try {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      return createFileCreator3();<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    } catch (NoSuchMethodException e) {<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    }<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    return createFileCreator2();<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  }<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>  // cancel the processing if DFSClient is already closed.<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.276"></a>
+<span class="sourceLineNo">277</span><a name="line.277"></a>
+<span class="sourceLineNo">278</span>    private final DFSClient client;<a name="line.278"></a>
+<span class="sourceLineNo">279</span><a name="line.279"></a>
+<span class="sourceLineNo">280</span>    public CancelOnClose(DFSClient client) {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      this.client = client;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    public boolean progress() {<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
 <span class="sourceLineNo">288</span>  }<a name="line.288"></a>
 <span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  }<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    return client.getConf().createChecksum(null);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  }<a name="line.296"></a>
-<span class="sourceLineNo">297</span><a name="line.297"></a>
-<span class="sourceLineNo">298</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    List&lt;Integer&gt; flagList = ack.getFlagList();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    Integer headerFlag;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    if (flagList.isEmpty()) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      Status reply = ack.getReply(0);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      headerFlag = PipelineAck.combineHeader(ECN.DISABLED, reply);<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    } else {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      headerFlag = flagList.get(0);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    return PipelineAck.getStatusFromHeader(headerFlag);<a name="line.307"></a>
-<span class="sourceLineNo">308</span>  }<a name="line.308"></a>
-<span class="sourceLineNo">309</span><a name="line.309"></a>
-<span class="sourceLineNo">310</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      new ProtobufVarint32FrameDecoder(),<a name="line.313"></a>
-<span class="sourceLineNo">314</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>        @Override<a name="line.317"></a>
-<span class="sourceLineNo">318</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.318"></a>
-<span class="sourceLineNo">319</span>            throws Exception {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>          Status pipelineStatus = resp.getStatus();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.322"></a>
-<span class="sourceLineNo">323</span>          }<a name="line.323"></a>
-<span class="sourceLineNo">324</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.324"></a>
-<span class="sourceLineNo">325</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.327"></a>
-<span class="sourceLineNo">328</span>                  resp.getMessage() + ", " + logInfo);<a name="line.328"></a>
-<span class="sourceLineNo">329</span>            } else {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.330"></a>
-<span class="sourceLineNo">331</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>            }<a name="line.332"></a>
-<span class="sourceLineNo">333</span>          }<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // success<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          ChannelPipeline p = ctx.pipeline();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.337"></a>
-<span class="sourceLineNo">338</span>            // of pipeline.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>            if (handler instanceof IdleStateHandler) {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>              break;<a name="line.340"></a>
-<span class="sourceLineNo">341</span>            }<a name="line.341"></a>
-<span class="sourceLineNo">342</span>          }<a name="line.342"></a>
-<span class="sourceLineNo">343</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.343"></a>
-<span class="sourceLineNo">344</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>          ctx.channel().config().setAutoRead(false);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>          promise.trySuccess(ctx.channel());<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
-<span class="sourceLineNo">348</span><a name="line.348"></a>
-<span class="sourceLineNo">349</span>        @Override<a name="line.349"></a>
-<span class="sourceLineNo">350</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.351"></a>
-<span class="sourceLineNo">352</span>        }<a name="line.352"></a>
-<span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>        @Override<a name="line.354"></a>
-<span class="sourceLineNo">355</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.356"></a>
-<span class="sourceLineNo">357</span>            promise<a name="line.357"></a>
-<span class="sourceLineNo">358</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>          } else {<a name="line.359"></a>
-<span class="sourceLineNo">360</span>            super.userEventTriggered(ctx, evt);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>          }<a name="line.361"></a>
-<span class="sourceLineNo">362</span>        }<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>        @Override<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>          promise.tryFailure(cause);<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        }<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      });<a name="line.368"></a>
-<span class="sourceLineNo">369</span>  }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>  private static void requestWriteBlock(Channel channel, StorageType storageType,<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    OpWriteBlockProto proto =<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    int protoLen = proto.getSerializedSize();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    ByteBuf buffer =<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    channel.writeAndFlush(buffer);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  }<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      StorageType storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      throws IOException {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>      @Override<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        if (future.isSuccess()) {<a name="line.394"></a>
-<span class="sourceLineNo">395</span>          // setup response processing pipeline first, then send request.<a name="line.395"></a>
-<span class="sourceLineNo">396</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.396"></a>
-<span class="sourceLineNo">397</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        } else {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>          promise.tryFailure(future.cause());<a name="line.399"></a>
-<span class="sourceLineNo">400</span>        }<a name="line.400"></a>
-<span class="sourceLineNo">401</span>      }<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    });<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
-<span class="sourceLineNo">404</span><a name="line.404"></a>
-<span class="sourceLineNo">405</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.407"></a>
-<span class="sourceLineNo">408</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    StorageType[] storageTypes = locatedBlock.getStorageTypes();<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    boolean connectToDnViaHostname =<a name="line.411"></a>
-<span class="sourceLineNo">412</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PBHelperClient.convert(blockCopy))<a name="line.417"></a>
-<span class="sourceLineNo">418</span>        .setToken(PBHelperClient.convert(locatedBlock.getBlockToken())))<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      .setClientName(clientName).build();<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.423"></a>
-<span class="sourceLineNo">424</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.424"></a>
-<span class="sourceLineNo">425</span>        .setRequestedChecksum(checksumProto)<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      StorageType storageType = storageTypes[i];<a name="line.430"></a>
-<span class="sourceLineNo">431</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      futureList.add(promise);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>            @Override<a name="line.437"></a>
-<span class="sourceLineNo">438</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.438"></a>
-<span class="sourceLineNo">439</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.439"></a>
-<span class="sourceLineNo">440</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.440"></a>
-<span class="sourceLineNo">441</span>              // a null handler.<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            }<a name="line.442"></a>
-<span class="sourceLineNo">443</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>            @Override<a name="line.445"></a>
-<span class="sourceLineNo">446</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>              if (future.isSuccess()) {<a name="line.447"></a>
-<span class="sourceLineNo">448</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.448"></a>
-<span class="sourceLineNo">449</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.449"></a>
-<span class="sourceLineNo">450</span>              } else {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>                promise.tryFailure(future.cause());<a name="line.451"></a>
-<span class="sourceLineNo">452</span>              }<a name="line.452"></a>
-<span class="sourceLineNo">453</span>            }<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          });<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    }<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    return futureList;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>  }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  public static class NameNodeException extends IOException {<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>    public NameNodeException(Throwable cause) {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      super(cause);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    }<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
-<span class="sourceLineNo">470</span><a name="line.470"></a>
-<span class="sourceLineNo">471</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.471"></a>
-<span class="sourceLineNo">472</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.472"></a>
-<span class="sourceLineNo">473</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    Configuration conf = dfs.getConf();<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    DFSClient client = dfs.getClient();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    String clientName = client.getClientName();<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    ClientProtocol namenode = client.getNamenode();<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.479"></a>
-<span class="sourceLineNo">480</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    for (int retry = 0;; retry++) {<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      HdfsFileStatus stat;<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.486"></a>
-<span class="sourceLineNo">487</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      } catch (Exception e) {<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        if (e instanceof RemoteException) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>          throw (RemoteException) e;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        } else {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>          throw new NameNodeException(e);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        }<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      }<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      beginFileLease(client, stat.getFileId());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      boolean succ = false;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      LocatedBlock locatedBlock = null;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      try {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>        DataChecksum summer = createChecksum(client);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        locatedBlock = namenode.addBlock(src, client.getClientName(), null, excludesNodes,<a name="line.502"></a>
-<span class="sourceLineNo">503</span>          stat.getFileId(), null, null);<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          try {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          } catch (Exception e) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>            // exclude the broken DN next time<a name="line.511"></a>
-<span class="sourceLineNo">512</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.512"></a>
-<span class="sourceLineNo">513</span>            throw e;<a name="line.513"></a>
-<span class="sourceLineNo">514</span>          }<a name="line.514"></a>
-<span class="sourceLineNo">515</span>        }<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.516"></a>
-<span class="sourceLineNo">517</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.518"></a>
-<span class="sourceLineNo">519</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.519"></a>
-<span class="sourceLineNo">520</span>        succ = true;<a name="line.520"></a>
-<span class="sourceLineNo">521</span>        return output;<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      } catch (RemoteException e) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.523"></a>
-<span class="sourceLineNo">524</span>        if (shouldRetryCreate(e)) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          if (retry &gt;= createMaxRetries) {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>            throw e.unwrapRemoteException();<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          }<a name="line.527"></a>
-<span class="sourceLineNo">528</span>        } else {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          throw e.unwrapRemoteException();<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      } catch (IOException e) {<a name="line.531"></a>
-<span class="sourceLineNo">532</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>        if (retry &gt;= createMaxRetries) {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          throw e;<a name="line.534"></a>
-<span class="sourceLineNo">535</span>        }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>        // overwrite the old broken file.<a name="line.536"></a>
-<span class="sourceLineNo">537</span>        overwrite = true;<a name="line.537"></a>
-<span class="sourceLineNo">538</span>        try {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.539"></a>
-<span class="sourceLineNo">540</span>        } catch (InterruptedException ie) {<a name="line.540"></a>
-<span class="sourceLineNo">541</span>          throw new InterruptedIOException();<a name="line.541"></a>
-<span class="sourceLineNo">542</span>        }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      } finally {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>        if (!succ) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>          if (futureList != null) {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>                @Override<a name="line.549"></a>
-<span class="sourceLineNo">550</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.550"></a>
-<span class="sourceLineNo">551</span>                  if (future.isSuccess()) {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>                    future.getNow().close();<a name="line.552"></a>
-<span class="sourceLineNo">553</span>                  }<a name="line.553"></a>
-<span class="sourceLineNo">554</span>                }<a name="line.554"></a>
-<span class="sourceLineNo">555</span>              });<a name="line.555"></a>
-<span class="sourceLineNo">556</span>            }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          }<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          endFileLease(client, stat.getFileId());<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * inside an {@link EventLoop}.<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.569"></a>
-<span class="sourceLineNo">570</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.571"></a>
-<span class="sourceLineNo">572</span><a name="line.572"></a>
-<span class="sourceLineNo">573</span>      @Override<a name="line.573"></a>
-<span class="sourceLineNo">574</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.574"></a>
-<span class="sourceLineNo">575</span>          throws IOException, UnresolvedLinkException {<a name="line.575"></a>
-<span class="sourceLineNo">576</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.576"></a>
-<span class="sourceLineNo">577</span>          blockSize, eventLoopGroup, channelClass);<a name="line.577"></a>
+<span class="sourceLineNo">290</span>  static {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    try {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      LEASE_MANAGER = createLeaseManager();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.293"></a>
+<span class="sourceLineNo">294</span>      FILE_CREATOR = createFileCreator();<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    } catch (Exception e) {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.297"></a>
+<span class="sourceLineNo">298</span>          "HBASE-16110 for more information.";<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      LOG.error(msg, e);<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      throw new Error(msg, e);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    }<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
+<span class="sourceLineNo">303</span><a name="line.303"></a>
+<span class="sourceLineNo">304</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  }<a name="line.306"></a>
+<span class="sourceLineNo">307</span><a name="line.307"></a>
+<span class="sourceLineNo">308</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    return client.getConf().createChecksum(null);<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  }<a name="line.314"></a>
+<span class="sourceLineNo">315</span><a name="line.315"></a>
+<span class="sourceLineNo">316</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    List&lt;Integer&gt; flagList = ack.getFlagList();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    Integer headerFlag;<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    if (flagList.isEmpty()) {<a name="line.319"></a>
+<span class="sourceLineNo">320</span>      Status reply = ack.getReply(0);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      headerFlag = PipelineAck.combineHeader(ECN.DISABLED, reply);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    } else {<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      headerFlag = flagList.get(0);<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    }<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    return PipelineAck.getStatusFromHeader(headerFlag);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.328"></a>
+<span class="sourceLineNo">329</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.330"></a>
+<span class="sourceLineNo">331</span>      new ProtobufVarint32FrameDecoder(),<a name="line.331"></a>
+<span class="sourceLineNo">332</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.333"></a>
+<span class="sourceLineNo">334</span><a name="line.334"></a>
+<span class="sourceLineNo">335</span>        @Override<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.336"></a>
+<span class="sourceLineNo">337</span>            throws Exception {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          Status pipelineStatus = resp.getStatus();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.339"></a>
+<span class="sourceLineNo">340</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.340"></a>
+<span class="sourceLineNo">341</span>          }<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.343"></a>
+<span class="sourceLineNo">344</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.344"></a>
+<span class="sourceLineNo">345</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.345"></a>
+<span class="sourceLineNo">346</span>                  resp.getMessage() + ", " + logInfo);<a name="line.346"></a>
+<span class="sourceLineNo">347</span>            } else {<a name="line.347"></a>
+<span class="sourceLineNo">348</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.348"></a>
+<span class="sourceLineNo">349</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.349"></a>
+<span class="sourceLineNo">350</span>            }<a name="line.350"></a>
+<span class="sourceLineNo">351</span>          }<a name="line.351"></a>
+<span class="sourceLineNo">352</span>          // success<a name="line.352"></a>
+<span class="sourceLineNo">353</span>          ChannelPipeline p = ctx.pipeline();<a name="line.353"></a>
+<span class="sourceLineNo">354</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.354"></a>
+<span class="sourceLineNo">355</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.355"></a>
+<span class="sourceLineNo">356</span>            // of pipeline.<a name="line.356"></a>
+<span class="sourceLineNo">357</span>            if (handler instanceof IdleStateHandler) {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>              break;<a name="line.358"></a>
+<span class="sourceLineNo">359</span>            }<a name="line.359"></a>
+<span class="sourceLineNo">360</span>          }<a name="line.360"></a>
+<span class="sourceLineNo">361</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.361"></a>
+<span class="sourceLineNo">362</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.362"></a>
+<span class="sourceLineNo">363</span>          ctx.channel().config().setAutoRead(false);<a name="line.363"></a>
+<span class="sourceLineNo">364</span>          promise.trySuccess(ctx.channel());<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
+<span class="sourceLineNo">366</span><a name="line.366"></a>
+<span class="sourceLineNo">367</span>        @Override<a name="line.367"></a>
+<span class="sourceLineNo">368</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.369"></a>
+<span class="sourceLineNo">370</span>        }<a name="line.370"></a>
+<span class="sourceLineNo">371</span><a name="line.371"></a>
+<span class="sourceLineNo">372</span>        @Override<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.373"></a>
+<span class="sourceLineNo">374</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>            promise<a name="line.375"></a>
+<span class="sourceLineNo">376</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.376"></a>
+<span class="sourceLineNo">377</span>          } else {<a name="line.377"></a>
+<span class="sourceLineNo">378</span>            super.userEventTriggered(ctx, evt);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>          }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>        }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>        @Override<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          promise.tryFailure(cause);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>        }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      });<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  }<a name="line.387"></a>
+<span class="sourceLineNo">388</span><a name="line.388"></a>
+<span class="sourceLineNo">389</span>  private static void requestWriteBlock(Channel channel, StorageType storageType,<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    OpWriteBlockProto proto =<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();<a name="line.392"></a>
+<span class="sourceLineNo">393</span>    int protoLen = proto.getSerializedSize();<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    ByteBuf buffer =<a name="line.394"></a>
+<span class="sourceLineNo">395</span>      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.397"></a>
+<span class="sourceLineNo">398</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    channel.writeAndFlush(buffer);<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  }<a name="line.400"></a>
+<span class="sourceLineNo">401</span><a name="line.401"></a>
+<span class="sourceLineNo">402</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.402"></a>
+<span class="sourceLineNo">403</span>      StorageType storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.403"></a>
+<span class="sourceLineNo">404</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.404"></a>
+<span class="sourceLineNo">405</span>      throws IOException {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.406"></a>
+<span class="sourceLineNo">407</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.407"></a>
+<span class="sourceLineNo">408</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.408"></a>
+<span class="sourceLineNo">409</span><a name="line.409"></a>
+<span class="sourceLineNo">410</span>      @Override<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>        if (future.isSuccess()) {<a name="line.412"></a>
+<span class="sourceLineNo">413</span>          // setup response processing pipeline first, then send request.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.415"></a>
+<span class="sourceLineNo">416</span>        } else {<a name="line.416"></a>
+<span class="sourceLineNo">417</span>          promise.tryFailure(future.cause());<a name="line.417"></a>
+<span class="sourceLineNo">418</span>        }<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      }<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    });<a name="line.420"></a>
+<span class="sourceLineNo">421</span>  }<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.425"></a>
+<span class="sourceLineNo">426</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    StorageType[] storageTypes = locatedBlock.getStorageTypes();<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.428"></a>
+<span class="sourceLineNo">429</span>    boolean connectToDnViaHostname =<a name="line.429"></a>
+<span class="sourceLineNo">430</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.432"></a>
+<span class="sourceLineNo">433</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.434"></a>
+<span class="sourceLineNo">435</span>      .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PBHelperClient.convert(blockCopy))<a name="line.435"></a>
+<span class="sourceLineNo">436</span>        .setToken(PBHelperClient.convert(locatedBlock.getBlockToken())))<a name="line.436"></a>
+<span class="sourceLineNo">437</span>      .setClientName(clientName).build();<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.438"></a>
+<span class="sourceLineNo">439</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        .setRequestedChecksum(checksumProto)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.445"></a>
+<span class="sourceLineNo">446</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.447"></a>
+<span class="sourceLineNo">448</span>      StorageType storageType = storageTypes[i];<a name="line.448"></a>
+<span class="sourceLineNo">449</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>      futureList.add(promise);<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.453"></a>
+<span class="sourceLineNo">454</span><a name="line.454"></a>
+<span class="sourceLineNo">455</span>            @Override<a name="line.455"></a>
+<span class="sourceLineNo">456</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.456"></a>
+<span class="sourceLineNo">457</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.457"></a>
+<span class="sourceLineNo">458</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.458"></a>
+<span class="sourceLineNo">459</span>              // a null handler.<a name="line.459"></a>
+<span class="sourceLineNo">460</span>            }<a name="line.460"></a>
+<span class="sourceLineNo">461</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.461"></a>
+<span class="sourceLineNo">462</span><a name="line.462"></a>
+<span class="sourceLineNo">463</span>            @Override<a name="line.463"></a>
+<span class="sourceLineNo">464</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>              if (future.isSuccess()) {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.466"></a>
+<span class="sourceLineNo">467</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.467"></a>
+<span class="sourceLineNo">468</span>              } else {<a name="line.468"></a>
+<span class="sourceLineNo">469</span>                promise.tryFailure(future.cause());<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              }<a name="line.470"></a>
+<span class="sourceLineNo">471</span>            }<a name="line.471"></a>
+<span class="sourceLineNo">472</span>          });<a name="line.472"></a>
+<span class="sourceLineNo">473</span>    }<a name="line.473"></a>
+<span class="sourceLineNo">474</span>    return futureList;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>  }<a name="line.475"></a>
+<span class="sourceLineNo">476</span><a name="line.476"></a>
+<span class="sourceLineNo">477</span>  /**<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   */<a name="line.479"></a>
+<span class="sourceLineNo">480</span>  public static class NameNodeException extends IOException {<a name="line.480"></a>
+<span class="sourceLineNo">481</span><a name="line.481"></a>
+<span class="sourceLineNo">482</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>    public NameNodeException(Throwable cause) {<a name="line.484"></a>
+<span class="sourceLineNo">485</span>      super(cause);<a name="line.485"></a>
+<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
+<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
+<span class="sourceLineNo">488</span><a name="line.488"></a>
+<span class="sourceLineNo">489</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.490"></a>
+<span class="sourceLineNo">491</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    Configuration conf = dfs.getConf();<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    DFSClient client = dfs.getClient();<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    String clientName = client.getClientName();<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    ClientProtocol namenode = client.getNamenode();<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    for (int retry = 0;; retry++) {<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      HdfsFileStatus stat;<a name="line.501"></a>
+<span class="sourceLineNo">502</span>      try {<a name="line.502"></a>
+<span class="sourceLineNo">503</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.503"></a>
+<span class="sourceLineNo">504</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.504"></a>
+<span class="sourceLineNo">505</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.505"></a>
+<span class="sourceLineNo">506</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.506"></a>
+<span class="sourceLineNo">507</span>      } catch (Exception e) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>        if (e instanceof RemoteException) {<a name="line.508"></a>
+<span class="sourceLineNo">509</span>          throw (RemoteException) e;<a name="line.509"></a>
+<span class="sourceLineNo">510</span>        } else {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>          throw new NameNodeException(e);<a name="line.511"></a>
+<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      }<a name="line.513"></a>
+<span class="sourceLineNo">514</span>      beginFileLease(client, stat.getFileId());<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      boolean succ = false;<a name="line.515"></a>
+<span class="sourceLineNo">516</span>      LocatedBlock locatedBlock = null;<a name="line.516"></a>
+<span class="sourceLineNo">517</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.517"></a>
+<span class="sourceLineNo">518</span>      try {<a name="line.518"></a>
+<span class="sourceLineNo">519</span>        DataChecksum summer = createChecksum(client);<a name="line.519"></a>
+<span class="sourceLineNo">520</span>        locatedBlock = namenode.addBlock(src, client.getClientName(), null, excludesNodes,<a name="line.520"></a>
+<span class="sourceLineNo">521</span>          stat.getFileId(), null, null);<a name="line.521"></a>
+<span class="sourceLineNo">522</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.523"></a>
+<span class="sourceLineNo">524</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.524"></a>
+<span class="sourceLineNo">525</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          try {<a name="line.526"></a>
+<span class="sourceLineNo">527</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.527"></a>
+<span class="sourceLineNo">528</span>          } catch (Exception e) {<a name="line.528"></a>
+<span class="sourceLineNo">529</span>            // exclude the broken DN next time<a name="line.529"></a>
+<span class="sourceLineNo">530</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.530"></a>
+<span class="sourceLineNo">531</span>            throw e;<a name="line.531"></a>
+<span class="sourceLineNo">532</span>          }<a name="line.532"></a>
+<span class="sourceLineNo">533</span>        }<a name="line.533"></a>
+<span class="sourceLineNo">534</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.534"></a>
+<span class="sourceLineNo">535</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.535"></a>
+<span class="sourceLineNo">536</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.536"></a>
+<span class="sourceLineNo">537</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        succ = true;<a name="line.538"></a>
+<span class="sourceLineNo">539</span>        return output;<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      } catch (RemoteException e) {<a name="line.540"></a>
+<span class="sourceLineNo">541</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>        if (shouldRetryCreate(e)) {<a name="line.542"></a>
+<span class="sourceLineNo">543</span>          if (retry &gt;= createMaxRetries) {<a name="line.543"></a>
+<span class="sourceLineNo">544</span>            throw e.unwrapRemoteException();<a name="line.544"></a>
+<span class="sourceLineNo">545</span>          }<a name="line.545"></a>
+<span class="sourceLineNo">546</span>        } else {<a name="line.546"></a>
+<span class="sourceLineNo">547</span>          throw e.unwrapRemoteException();<a name="line.547"></a>
+<span class="sourceLineNo">548</span>        }<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      } catch (IOException e) {<a name="line.549"></a>
+<span class="sourceLineNo">550</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.550"></a>
+<span class="sourceLineNo">551</span>        if (retry &gt;= createMaxRetries) {<a name="line.551"></a>
+<span class="sourceLineNo">552</span>          throw e;<a name="line.552"></a>
+<span class="sourceLineNo">553</span>        }<a name="line.553"></a>
+<span class="sourceLineNo">554</span>        // overwrite the old broken file.<a name="line.554"></a>
+<span class="sourceLineNo">555</span>        overwrite = true;<a name="line.555"></a>
+<span class="sourceLineNo">556</span>        try {<a name="line.556"></a>
+<span class="sourceLineNo">557</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.557"></a>
+<span class="sourceLineNo">558</span>        } catch (InterruptedException ie) {<a name="line.558"></a>
+<span class="sourceLineNo">559</span>          throw new InterruptedIOException();<a name="line.559"></a>
+<span class="sourceLineNo">560</span>        }<a name="line.560"></a>
+<span class="sourceLineNo">561</span>      } finally {<a name="line.561"></a>
+<span class="sourceLineNo">562</span>        if (!succ) {<a name="line.562"></a>
+<span class="sourceLineNo">563</span>          if (futureList != null) {<a name="line.563"></a>
+<span class="sourceLineNo">564</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.565"></a>
+<span class="sourceLineNo">566</span><a name="line.566"></a>
+<span class="sourceLineNo">567</span>                @Override<a name="line.567"></a>
+<span class="sourceLineNo">568</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.568"></a>
+<span class="sourceLineNo">569</span>                  if (future.isSuccess()) {<a name="line.569"></a>
+<span class="sourceLineNo">570</span>                    future.getNow().close();<a name="line.570"></a>
+<span class="sourceLineNo">571</span>                  }<a name="line.571"></a>
+<span class="sourceLineNo">572</span>                }<a name="line.572"></a>
+<span class="sourceLineNo">573</span>              });<a name="line.573"></a>
+<span class="sourceLineNo">574</span>            }<a name="line.574"></a>
+<span class="sourceLineNo">575</span>          }<a name="line.575"></a>
+<span class="sourceLineNo">576</span>          endFileLease(client, stat.getFileId());<a name="line.576"></a>
+<span class="sourceLineNo">577</span>        }<a name="line.577"></a>
 <span class="sourceLineNo">578</span>      }<a name="line.578"></a>
-<span class="sourceLineNo">579</span><a name="line.579"></a>
-<span class="sourceLineNo">580</span>      @Override<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>        throw new UnsupportedOperationException();<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      }<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }.resolve(dfs, f);<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.589"></a>
-<span class="sourceLineNo">590</span>    // DFSOutputStream.newStreamForCreate.<a name="line.590"></a>
-<span class="sourceLineNo">591</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.591"></a>
-<span class="sourceLineNo">592</span>  }<a name="line.592"></a>
-<span class="sourceLineNo">593</span><a name="line.593"></a>
-<span class="sourceLineNo">594</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      ExtendedBlock block, long fileId) {<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    for (int retry = 0;; retry++) {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      try {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>          endFileLease(client, fileId);<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          return;<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } else {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      } catch (RemoteException e) {<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        IOException ioe = e.unwrapRemoteException();<a name="line.605"></a>
-<span class="sourceLineNo">606</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>          return;<a name="line.608"></a>
-<span class="sourceLineNo">609</span>        } else {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        }<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      } catch (Exception e) {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      }<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      sleepIgnoreInterrupt(retry);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    }<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  }<a name="line.617"></a>
-<span class="sourceLineNo">618</span><a name="line.618"></a>
-<span class="sourceLineNo">619</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    try {<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    } catch (InterruptedException e) {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    }<a name="line.623"></a>
-<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
-<span class="sourceLineNo">625</span>}<a name="line.625"></a>
+<span class="sourceLineNo">579</span>    }<a name="line.579"></a>
+<span class="sourceLineNo">580</span>  }<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>  /**<a name="line.582"></a>
+<span class="sourceLineNo">583</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.583"></a>
+<span class="sourceLineNo">584</span>   * inside an {@link EventLoop}.<a name="line.584"></a>
+<span class="sourceLineNo">585</span>   */<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.587"></a>
+<span class="sourceLineNo">588</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.588"></a>
+<span class="sourceLineNo">589</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.589"></a>
+<span class="sourceLineNo">590</span><a name="line.590"></a>
+<span class="sourceLineNo">591</span>      @Override<a name="line.591"></a>
+<span class="sourceLineNo">592</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.592"></a>
+<span class="sourceLineNo">593</span>          throws IOException, UnresolvedLinkException {<a name="line.593"></a>
+<span class="sourceLineNo">594</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.594"></a>
+<span class="sourceLineNo">595</span>          blockSize, eventLoopGroup, channelClass);<a name="line.595"></a>
+<span class="sourceLineNo">596</span>      }<a name="line.596"></a>
+<span class="sourceLineNo">597</span><a name="line.597"></a>
+<span class="sourceLineNo">598</span>      @Override<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.599"></a>
+<span class="sourceLineNo">600</span>        throw new UnsupportedOperationException();<a name="line.600"></a>
+<span class="sourceLineNo">601</span>      }<a name="line.601"></a>
+<span class="sourceLineNo">602</span>    }.resolve(dfs, f);<a name="line.602"></a>
+<span class="sourceLineNo">603</span>  }<a name="line.603"></a>
+<span class="sourceLineNo">604</span><a name="line.604"></a>
+<span class="sourceLineNo">605</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.607"></a>
+<span class="sourceLineNo">608</span>    // DFSOutputStream.newStreamForCreate.<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.609"></a>
+<span class="sourceLineNo">610</span>  }<a name="line.610"></a>
+<span class="sourceLineNo">611</span><a name="line.611"></a>
+<span class="sourceLineNo">612</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.612"></a>
+<span class="sourceLineNo">613</span>      ExtendedBlock block, long fileId) {<a name="line.613"></a>
+<span class="sourceLineNo">614</span>    for (int retry = 0;; retry++) {<a name="line.614"></a>
+<span class="sourceLineNo">615</span>      try {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.616"></a>
+<span class="sourceLineNo">617</span>          endFileLease(client, fileId);<a name="line.617"></a>
+<span class="sourceLineNo">618</span>          return;<a name="line.618"></a>
+<span class="sourceLineNo">619</span>        } else {<a name="line.619"></a>
+<span class="sourceLineNo">620</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.620"></a>
+<span class="sourceLineNo">621</span>        }<a name="line.621"></a>
+<span class="sourceLineNo">622</span>      } catch (RemoteException e) {<a name="line.622"></a>
+<span class="sourceLineNo">623</span>        IOException ioe = e.unwrapRemoteException();<a name="line.623"></a>
+<span class="sourceLineNo">624</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.624"></a>
+<span class="sourceLineNo">625</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.625"></a>
+<span class="sourceLineNo">626</span>          return;<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        } else {<a name="line.627"></a>
+<span class="sourceLineNo">628</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.628"></a>
+<span class="sourceLineNo">629</span>        }<a name="line.629"></a>
+<span class="sourceLineNo">630</span>      } catch (Exception e) {<a name="line.630"></a>
+<span class="sourceLineNo">631</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      }<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      sleepIgnoreInterrupt(retry);<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
+<span class="sourceLineNo">635</span>  }<a name="line.635"></a>
+<span class="sourceLineNo">636</span><a name="line.636"></a>
+<span class="sourceLineNo">637</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.637"></a>
+<span class="sourceLineNo">638</span>    try {<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.639"></a>
+<span class="sourceLineNo">640</span>    } catch (InterruptedException e) {<a name="line.640"></a>
+<span class="sourceLineNo">641</span>    }<a name="line.641"></a>
+<span class="sourceLineNo">642</span>  }<a name="line.642"></a>
+<span class="sourceLineNo">643</span>}<a name="line.643"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
index 6684af5..3556576 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.DFSClientAdaptor.html
@@ -229,408 +229,426 @@
 <span class="sourceLineNo">221</span>    };<a name="line.221"></a>
 <span class="sourceLineNo">222</span>  }<a name="line.222"></a>
 <span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.224"></a>
+<span class="sourceLineNo">224</span>  private static FileCreator createFileCreator3_3() throws NoSuchMethodException {<a name="line.224"></a>
 <span class="sourceLineNo">225</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.226"></a>
-<span class="sourceLineNo">227</span>      CryptoProtocolVersion[].class, String.class);<a name="line.227"></a>
+<span class="sourceLineNo">226</span>        String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        CryptoProtocolVersion[].class, String.class, String.class);<a name="line.227"></a>
 <span class="sourceLineNo">228</span><a name="line.228"></a>
 <span class="sourceLineNo">229</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.229"></a>
 <span class="sourceLineNo">230</span>        supportedVersions) -&gt; {<a name="line.230"></a>
 <span class="sourceLineNo">231</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.231"></a>
-<span class="sourceLineNo">232</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.232"></a>
+<span class="sourceLineNo">232</span>          createParent, replication, blockSize, supportedVersions, null, null);<a name="line.232"></a>
 <span class="sourceLineNo">233</span>    };<a name="line.233"></a>
 <span class="sourceLineNo">234</span>  }<a name="line.234"></a>
 <span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.236"></a>
+<span class="sourceLineNo">236</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.236"></a>
 <span class="sourceLineNo">237</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.237"></a>
 <span class="sourceLineNo">238</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      CryptoProtocolVersion[].class);<a name="line.239"></a>
+<span class="sourceLineNo">239</span>      CryptoProtocolVersion[].class, String.class);<a name="line.239"></a>
 <span class="sourceLineNo">240</span><a name="line.240"></a>
 <span class="sourceLineNo">241</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.241"></a>
 <span class="sourceLineNo">242</span>        supportedVersions) -&gt; {<a name="line.242"></a>
 <span class="sourceLineNo">243</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        createParent, replication, blockSize, supportedVersions);<a name="line.244"></a>
+<span class="sourceLineNo">244</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.244"></a>
 <span class="sourceLineNo">245</span>    };<a name="line.245"></a>
 <span class="sourceLineNo">246</span>  }<a name="line.246"></a>
 <span class="sourceLineNo">247</span><a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    try {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return createFileCreator3();<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    } catch (NoSuchMethodException e) {<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    }<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    return createFileCreator2();<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  }<a name="line.255"></a>
-<span class="sourceLineNo">256</span><a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // cancel the processing if DFSClient is already closed.<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.258"></a>
+<span class="sourceLineNo">248</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      CryptoProtocolVersion[].class);<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        supportedVersions) -&gt; {<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.255"></a>
+<span class="sourceLineNo">256</span>        createParent, replication, blockSize, supportedVersions);<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    };<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  }<a name="line.258"></a>
 <span class="sourceLineNo">259</span><a name="line.259"></a>
-<span class="sourceLineNo">260</span>    private final DFSClient client;<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    public CancelOnClose(DFSClient client) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      this.client = client;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
-<span class="sourceLineNo">265</span><a name="line.265"></a>
-<span class="sourceLineNo">266</span>    @Override<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    public boolean progress() {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  static {<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    try {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      LEASE_MANAGER = createLeaseManager();<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      FILE_CREATOR = createFileCreator();<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    } catch (Exception e) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.278"></a>
-<span class="sourceLineNo">279</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          "HBASE-16110 for more information.";<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      LOG.error(msg, e);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      throw new Error(msg, e);<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
-<span class="sourceLineNo">285</span><a name="line.285"></a>
-<span class="sourceLineNo">286</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.287"></a>
+<span class="sourceLineNo">260</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    try {<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      return createFileCreator3_3();<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    } catch (NoSuchMethodException e) {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 3.2 or below");<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    }<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>    try {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      return createFileCreator3();<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    } catch (NoSuchMethodException e) {<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    }<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    return createFileCreator2();<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  }<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>  // cancel the processing if DFSClient is already closed.<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.276"></a>
+<span class="sourceLineNo">277</span><a name="line.277"></a>
+<span class="sourceLineNo">278</span>    private final DFSClient client;<a name="line.278"></a>
+<span class="sourceLineNo">279</span><a name="line.279"></a>
+<span class="sourceLineNo">280</span>    public CancelOnClose(DFSClient client) {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      this.client = client;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    public boolean progress() {<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
 <span class="sourceLineNo">288</span>  }<a name="line.288"></a>
 <span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  }<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    return client.getConf().createChecksum(null);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  }<a name="line.296"></a>
-<span class="sourceLineNo">297</span><a name="line.297"></a>
-<span class="sourceLineNo">298</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    List&lt;Integer&gt; flagList = ack.getFlagList();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    Integer headerFlag;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    if (flagList.isEmpty()) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      Status reply = ack.getReply(0);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      headerFlag = PipelineAck.combineHeader(ECN.DISABLED, reply);<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    } else {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      headerFlag = flagList.get(0);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    return PipelineAck.getStatusFromHeader(headerFlag);<a name="line.307"></a>
-<span class="sourceLineNo">308</span>  }<a name="line.308"></a>
-<span class="sourceLineNo">309</span><a name="line.309"></a>
-<span class="sourceLineNo">310</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      new ProtobufVarint32FrameDecoder(),<a name="line.313"></a>
-<span class="sourceLineNo">314</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>        @Override<a name="line.317"></a>
-<span class="sourceLineNo">318</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.318"></a>
-<span class="sourceLineNo">319</span>            throws Exception {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>          Status pipelineStatus = resp.getStatus();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.322"></a>
-<span class="sourceLineNo">323</span>          }<a name="line.323"></a>
-<span class="sourceLineNo">324</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.324"></a>
-<span class="sourceLineNo">325</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.327"></a>
-<span class="sourceLineNo">328</span>                  resp.getMessage() + ", " + logInfo);<a name="line.328"></a>
-<span class="sourceLineNo">329</span>            } else {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.330"></a>
-<span class="sourceLineNo">331</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>            }<a name="line.332"></a>
-<span class="sourceLineNo">333</span>          }<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // success<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          ChannelPipeline p = ctx.pipeline();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.337"></a>
-<span class="sourceLineNo">338</span>            // of pipeline.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>            if (handler instanceof IdleStateHandler) {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>              break;<a name="line.340"></a>
-<span class="sourceLineNo">341</span>            }<a name="line.341"></a>
-<span class="sourceLineNo">342</span>          }<a name="line.342"></a>
-<span class="sourceLineNo">343</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.343"></a>
-<span class="sourceLineNo">344</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>          ctx.channel().config().setAutoRead(false);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>          promise.trySuccess(ctx.channel());<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
-<span class="sourceLineNo">348</span><a name="line.348"></a>
-<span class="sourceLineNo">349</span>        @Override<a name="line.349"></a>
-<span class="sourceLineNo">350</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.351"></a>
-<span class="sourceLineNo">352</span>        }<a name="line.352"></a>
-<span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>        @Override<a name="line.354"></a>
-<span class="sourceLineNo">355</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.356"></a>
-<span class="sourceLineNo">357</span>            promise<a name="line.357"></a>
-<span class="sourceLineNo">358</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>          } else {<a name="line.359"></a>
-<span class="sourceLineNo">360</span>            super.userEventTriggered(ctx, evt);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>          }<a name="line.361"></a>
-<span class="sourceLineNo">362</span>        }<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>        @Override<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>          promise.tryFailure(cause);<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        }<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      });<a name="line.368"></a>
-<span class="sourceLineNo">369</span>  }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>  private static void requestWriteBlock(Channel channel, StorageType storageType,<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    OpWriteBlockProto proto =<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    int protoLen = proto.getSerializedSize();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    ByteBuf buffer =<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    channel.writeAndFlush(buffer);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  }<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      StorageType storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      throws IOException {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>      @Override<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        if (future.isSuccess()) {<a name="line.394"></a>
-<span class="sourceLineNo">395</span>          // setup response processing pipeline first, then send request.<a name="line.395"></a>
-<span class="sourceLineNo">396</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.396"></a>
-<span class="sourceLineNo">397</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        } else {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>          promise.tryFailure(future.cause());<a name="line.399"></a>
-<span class="sourceLineNo">400</span>        }<a name="line.400"></a>
-<span class="sourceLineNo">401</span>      }<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    });<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
-<span class="sourceLineNo">404</span><a name="line.404"></a>
-<span class="sourceLineNo">405</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.407"></a>
-<span class="sourceLineNo">408</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    StorageType[] storageTypes = locatedBlock.getStorageTypes();<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    boolean connectToDnViaHostname =<a name="line.411"></a>
-<span class="sourceLineNo">412</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PBHelperClient.convert(blockCopy))<a name="line.417"></a>
-<span class="sourceLineNo">418</span>        .setToken(PBHelperClient.convert(locatedBlock.getBlockToken())))<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      .setClientName(clientName).build();<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.423"></a>
-<span class="sourceLineNo">424</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.424"></a>
-<span class="sourceLineNo">425</span>        .setRequestedChecksum(checksumProto)<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      StorageType storageType = storageTypes[i];<a name="line.430"></a>
-<span class="sourceLineNo">431</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      futureList.add(promise);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>            @Override<a name="line.437"></a>
-<span class="sourceLineNo">438</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.438"></a>
-<span class="sourceLineNo">439</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.439"></a>
-<span class="sourceLineNo">440</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.440"></a>
-<span class="sourceLineNo">441</span>              // a null handler.<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            }<a name="line.442"></a>
-<span class="sourceLineNo">443</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>            @Override<a name="line.445"></a>
-<span class="sourceLineNo">446</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>              if (future.isSuccess()) {<a name="line.447"></a>
-<span class="sourceLineNo">448</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.448"></a>
-<span class="sourceLineNo">449</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.449"></a>
-<span class="sourceLineNo">450</span>              } else {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>                promise.tryFailure(future.cause());<a name="line.451"></a>
-<span class="sourceLineNo">452</span>              }<a name="line.452"></a>
-<span class="sourceLineNo">453</span>            }<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          });<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    }<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    return futureList;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>  }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  public static class NameNodeException extends IOException {<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>    public NameNodeException(Throwable cause) {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      super(cause);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    }<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
-<span class="sourceLineNo">470</span><a name="line.470"></a>
-<span class="sourceLineNo">471</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.471"></a>
-<span class="sourceLineNo">472</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.472"></a>
-<span class="sourceLineNo">473</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    Configuration conf = dfs.getConf();<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    DFSClient client = dfs.getClient();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    String clientName = client.getClientName();<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    ClientProtocol namenode = client.getNamenode();<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.479"></a>
-<span class="sourceLineNo">480</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    for (int retry = 0;; retry++) {<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      HdfsFileStatus stat;<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.486"></a>
-<span class="sourceLineNo">487</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      } catch (Exception e) {<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        if (e instanceof RemoteException) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>          throw (RemoteException) e;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        } else {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>          throw new NameNodeException(e);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        }<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      }<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      beginFileLease(client, stat.getFileId());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      boolean succ = false;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      LocatedBlock locatedBlock = null;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      try {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>        DataChecksum summer = createChecksum(client);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        locatedBlock = namenode.addBlock(src, client.getClientName(), null, excludesNodes,<a name="line.502"></a>
-<span class="sourceLineNo">503</span>          stat.getFileId(), null, null);<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          try {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          } catch (Exception e) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>            // exclude the broken DN next time<a name="line.511"></a>
-<span class="sourceLineNo">512</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.512"></a>
-<span class="sourceLineNo">513</span>            throw e;<a name="line.513"></a>
-<span class="sourceLineNo">514</span>          }<a name="line.514"></a>
-<span class="sourceLineNo">515</span>        }<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.516"></a>
-<span class="sourceLineNo">517</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.518"></a>
-<span class="sourceLineNo">519</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.519"></a>
-<span class="sourceLineNo">520</span>        succ = true;<a name="line.520"></a>
-<span class="sourceLineNo">521</span>        return output;<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      } catch (RemoteException e) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.523"></a>
-<span class="sourceLineNo">524</span>        if (shouldRetryCreate(e)) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          if (retry &gt;= createMaxRetries) {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>            throw e.unwrapRemoteException();<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          }<a name="line.527"></a>
-<span class="sourceLineNo">528</span>        } else {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          throw e.unwrapRemoteException();<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      } catch (IOException e) {<a name="line.531"></a>
-<span class="sourceLineNo">532</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>        if (retry &gt;= createMaxRetries) {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          throw e;<a name="line.534"></a>
-<span class="sourceLineNo">535</span>        }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>        // overwrite the old broken file.<a name="line.536"></a>
-<span class="sourceLineNo">537</span>        overwrite = true;<a name="line.537"></a>
-<span class="sourceLineNo">538</span>        try {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.539"></a>
-<span class="sourceLineNo">540</span>        } catch (InterruptedException ie) {<a name="line.540"></a>
-<span class="sourceLineNo">541</span>          throw new InterruptedIOException();<a name="line.541"></a>
-<span class="sourceLineNo">542</span>        }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      } finally {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>        if (!succ) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>          if (futureList != null) {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>                @Override<a name="line.549"></a>
-<span class="sourceLineNo">550</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.550"></a>
-<span class="sourceLineNo">551</span>                  if (future.isSuccess()) {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>                    future.getNow().close();<a name="line.552"></a>
-<span class="sourceLineNo">553</span>                  }<a name="line.553"></a>
-<span class="sourceLineNo">554</span>                }<a name="line.554"></a>
-<span class="sourceLineNo">555</span>              });<a name="line.555"></a>
-<span class="sourceLineNo">556</span>            }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          }<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          endFileLease(client, stat.getFileId());<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * inside an {@link EventLoop}.<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.569"></a>
-<span class="sourceLineNo">570</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.571"></a>
-<span class="sourceLineNo">572</span><a name="line.572"></a>
-<span class="sourceLineNo">573</span>      @Override<a name="line.573"></a>
-<span class="sourceLineNo">574</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.574"></a>
-<span class="sourceLineNo">575</span>          throws IOException, UnresolvedLinkException {<a name="line.575"></a>
-<span class="sourceLineNo">576</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.576"></a>
-<span class="sourceLineNo">577</span>          blockSize, eventLoopGroup, channelClass);<a name="line.577"></a>
+<span class="sourceLineNo">290</span>  static {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    try {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      LEASE_MANAGER = createLeaseManager();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.293"></a>
+<span class="sourceLineNo">294</span>      FILE_CREATOR = createFileCreator();<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    } catch (Exception e) {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.297"></a>
+<span class="sourceLineNo">298</span>          "HBASE-16110 for more information.";<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      LOG.error(msg, e);<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      throw new Error(msg, e);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    }<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
+<span class="sourceLineNo">303</span><a name="line.303"></a>
+<span class="sourceLineNo">304</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  }<a name="line.306"></a>
+<span class="sourceLineNo">307</span><a name="line.307"></a>
+<span class="sourceLineNo">308</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    return client.getConf().createChecksum(null);<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  }<a name="line.314"></a>
+<span class="sourceLineNo">315</span><a name="line.315"></a>
+<span class="sourceLineNo">316</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    List&lt;Integer&gt; flagList = ack.getFlagList();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    Integer headerFlag;<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    if (flagList.isEmpty()) {<a name="line.319"></a>
+<span class="sourceLineNo">320</span>      Status reply = ack.getReply(0);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      headerFlag = PipelineAck.combineHeader(ECN.DISABLED, reply);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    } else {<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      headerFlag = flagList.get(0);<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    }<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    return PipelineAck.getStatusFromHeader(headerFlag);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.328"></a>
+<span class="sourceLineNo">329</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.330"></a>
+<span class="sourceLineNo">331</span>      new ProtobufVarint32FrameDecoder(),<a name="line.331"></a>
+<span class="sourceLineNo">332</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.333"></a>
+<span class="sourceLineNo">334</span><a name="line.334"></a>
+<span class="sourceLineNo">335</span>        @Override<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.336"></a>
+<span class="sourceLineNo">337</span>            throws Exception {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          Status pipelineStatus = resp.getStatus();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.339"></a>
+<span class="sourceLineNo">340</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.340"></a>
+<span class="sourceLineNo">341</span>          }<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.343"></a>
+<span class="sourceLineNo">344</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.344"></a>
+<span class="sourceLineNo">345</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.345"></a>
+<span class="sourceLineNo">346</span>                  resp.getMessage() + ", " + logInfo);<a name="line.346"></a>
+<span class="sourceLineNo">347</span>            } else {<a name="line.347"></a>
+<span class="sourceLineNo">348</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.348"></a>
+<span class="sourceLineNo">349</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.349"></a>
+<span class="sourceLineNo">350</span>            }<a name="line.350"></a>
+<span class="sourceLineNo">351</span>          }<a name="line.351"></a>
+<span class="sourceLineNo">352</span>          // success<a name="line.352"></a>
+<span class="sourceLineNo">353</span>          ChannelPipeline p = ctx.pipeline();<a name="line.353"></a>
+<span class="sourceLineNo">354</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.354"></a>
+<span class="sourceLineNo">355</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.355"></a>
+<span class="sourceLineNo">356</span>            // of pipeline.<a name="line.356"></a>
+<span class="sourceLineNo">357</span>            if (handler instanceof IdleStateHandler) {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>              break;<a name="line.358"></a>
+<span class="sourceLineNo">359</span>            }<a name="line.359"></a>
+<span class="sourceLineNo">360</span>          }<a name="line.360"></a>
+<span class="sourceLineNo">361</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.361"></a>
+<span class="sourceLineNo">362</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.362"></a>
+<span class="sourceLineNo">363</span>          ctx.channel().config().setAutoRead(false);<a name="line.363"></a>
+<span class="sourceLineNo">364</span>          promise.trySuccess(ctx.channel());<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
+<span class="sourceLineNo">366</span><a name="line.366"></a>
+<span class="sourceLineNo">367</span>        @Override<a name="line.367"></a>
+<span class="sourceLineNo">368</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.369"></a>
+<span class="sourceLineNo">370</span>        }<a name="line.370"></a>
+<span class="sourceLineNo">371</span><a name="line.371"></a>
+<span class="sourceLineNo">372</span>        @Override<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.373"></a>
+<span class="sourceLineNo">374</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>            promise<a name="line.375"></a>
+<span class="sourceLineNo">376</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.376"></a>
+<span class="sourceLineNo">377</span>          } else {<a name="line.377"></a>
+<span class="sourceLineNo">378</span>            super.userEventTriggered(ctx, evt);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>          }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>        }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>        @Override<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          promise.tryFailure(cause);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>        }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      });<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  }<a name="line.387"></a>
+<span class="sourceLineNo">388</span><a name="line.388"></a>
+<span class="sourceLineNo">389</span>  private static void requestWriteBlock(Channel channel, StorageType storageType,<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    OpWriteBlockProto proto =<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();<a name="line.392"></a>
+<span class="sourceLineNo">393</span>    int protoLen = proto.getSerializedSize();<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    ByteBuf buffer =<a name="line.394"></a>
+<span class="sourceLineNo">395</span>      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.397"></a>
+<span class="sourceLineNo">398</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    channel.writeAndFlush(buffer);<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  }<a name="line.400"></a>
+<span class="sourceLineNo">401</span><a name="line.401"></a>
+<span class="sourceLineNo">402</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.402"></a>
+<span class="sourceLineNo">403</span>      StorageType storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.403"></a>
+<span class="sourceLineNo">404</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.404"></a>
+<span class="sourceLineNo">405</span>      throws IOException {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.406"></a>
+<span class="sourceLineNo">407</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.407"></a>
+<span class="sourceLineNo">408</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.408"></a>
+<span class="sourceLineNo">409</span><a name="line.409"></a>
+<span class="sourceLineNo">410</span>      @Override<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>        if (future.isSuccess()) {<a name="line.412"></a>
+<span class="sourceLineNo">413</span>          // setup response processing pipeline first, then send request.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.415"></a>
+<span class="sourceLineNo">416</span>        } else {<a name="line.416"></a>
+<span class="sourceLineNo">417</span>          promise.tryFailure(future.cause());<a name="line.417"></a>
+<span class="sourceLineNo">418</span>        }<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      }<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    });<a name="line.420"></a>
+<span class="sourceLineNo">421</span>  }<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.425"></a>
+<span class="sourceLineNo">426</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    StorageType[] storageTypes = locatedBlock.getStorageTypes();<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.428"></a>
+<span class="sourceLineNo">429</span>    boolean connectToDnViaHostname =<a name="line.429"></a>
+<span class="sourceLineNo">430</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.432"></a>
+<span class="sourceLineNo">433</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.434"></a>
+<span class="sourceLineNo">435</span>      .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PBHelperClient.convert(blockCopy))<a name="line.435"></a>
+<span class="sourceLineNo">436</span>        .setToken(PBHelperClient.convert(locatedBlock.getBlockToken())))<a name="line.436"></a>
+<span class="sourceLineNo">437</span>      .setClientName(clientName).build();<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.438"></a>
+<span class="sourceLineNo">439</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        .setRequestedChecksum(checksumProto)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.445"></a>
+<span class="sourceLineNo">446</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.447"></a>
+<span class="sourceLineNo">448</span>      StorageType storageType = storageTypes[i];<a name="line.448"></a>
+<span class="sourceLineNo">449</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>      futureList.add(promise);<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.453"></a>
+<span class="sourceLineNo">454</span><a name="line.454"></a>
+<span class="sourceLineNo">455</span>            @Override<a name="line.455"></a>
+<span class="sourceLineNo">456</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.456"></a>
+<span class="sourceLineNo">457</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.457"></a>
+<span class="sourceLineNo">458</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.458"></a>
+<span class="sourceLineNo">459</span>              // a null handler.<a name="line.459"></a>
+<span class="sourceLineNo">460</span>            }<a name="line.460"></a>
+<span class="sourceLineNo">461</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.461"></a>
+<span class="sourceLineNo">462</span><a name="line.462"></a>
+<span class="sourceLineNo">463</span>            @Override<a name="line.463"></a>
+<span class="sourceLineNo">464</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>              if (future.isSuccess()) {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.466"></a>
+<span class="sourceLineNo">467</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.467"></a>
+<span class="sourceLineNo">468</span>              } else {<a name="line.468"></a>
+<span class="sourceLineNo">469</span>                promise.tryFailure(future.cause());<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              }<a name="line.470"></a>
+<span class="sourceLineNo">471</span>            }<a name="line.471"></a>
+<span class="sourceLineNo">472</span>          });<a name="line.472"></a>
+<span class="sourceLineNo">473</span>    }<a name="line.473"></a>
+<span class="sourceLineNo">474</span>    return futureList;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>  }<a name="line.475"></a>
+<span class="sourceLineNo">476</span><a name="line.476"></a>
+<span class="sourceLineNo">477</span>  /**<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   */<a name="line.479"></a>
+<span class="sourceLineNo">480</span>  public static class NameNodeException extends IOException {<a name="line.480"></a>
+<span class="sourceLineNo">481</span><a name="line.481"></a>
+<span class="sourceLineNo">482</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>    public NameNodeException(Throwable cause) {<a name="line.484"></a>
+<span class="sourceLineNo">485</span>      super(cause);<a name="line.485"></a>
+<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
+<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
+<span class="sourceLineNo">488</span><a name="line.488"></a>
+<span class="sourceLineNo">489</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.490"></a>
+<span class="sourceLineNo">491</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    Configuration conf = dfs.getConf();<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    DFSClient client = dfs.getClient();<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    String clientName = client.getClientName();<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    ClientProtocol namenode = client.getNamenode();<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    for (int retry = 0;; retry++) {<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      HdfsFileStatus stat;<a name="line.501"></a>
+<span class="sourceLineNo">502</span>      try {<a name="line.502"></a>
+<span class="sourceLineNo">503</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.503"></a>
+<span class="sourceLineNo">504</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.504"></a>
+<span class="sourceLineNo">505</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.505"></a>
+<span class="sourceLineNo">506</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.506"></a>
+<span class="sourceLineNo">507</span>      } catch (Exception e) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>        if (e instanceof RemoteException) {<a name="line.508"></a>
+<span class="sourceLineNo">509</span>          throw (RemoteException) e;<a name="line.509"></a>
+<span class="sourceLineNo">510</span>        } else {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>          throw new NameNodeException(e);<a name="line.511"></a>
+<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      }<a name="line.513"></a>
+<span class="sourceLineNo">514</span>      beginFileLease(client, stat.getFileId());<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      boolean succ = false;<a name="line.515"></a>
+<span class="sourceLineNo">516</span>      LocatedBlock locatedBlock = null;<a name="line.516"></a>
+<span class="sourceLineNo">517</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.517"></a>
+<span class="sourceLineNo">518</span>      try {<a name="line.518"></a>
+<span class="sourceLineNo">519</span>        DataChecksum summer = createChecksum(client);<a name="line.519"></a>
+<span class="sourceLineNo">520</span>        locatedBlock = namenode.addBlock(src, client.getClientName(), null, excludesNodes,<a name="line.520"></a>
+<span class="sourceLineNo">521</span>          stat.getFileId(), null, null);<a name="line.521"></a>
+<span class="sourceLineNo">522</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.523"></a>
+<span class="sourceLineNo">524</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.524"></a>
+<span class="sourceLineNo">525</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          try {<a name="line.526"></a>
+<span class="sourceLineNo">527</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.527"></a>
+<span class="sourceLineNo">528</span>          } catch (Exception e) {<a name="line.528"></a>
+<span class="sourceLineNo">529</span>            // exclude the broken DN next time<a name="line.529"></a>
+<span class="sourceLineNo">530</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.530"></a>
+<span class="sourceLineNo">531</span>            throw e;<a name="line.531"></a>
+<span class="sourceLineNo">532</span>          }<a name="line.532"></a>
+<span class="sourceLineNo">533</span>        }<a name="line.533"></a>
+<span class="sourceLineNo">534</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.534"></a>
+<span class="sourceLineNo">535</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.535"></a>
+<span class="sourceLineNo">536</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.536"></a>
+<span class="sourceLineNo">537</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        succ = true;<a name="line.538"></a>
+<span class="sourceLineNo">539</span>        return output;<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      } catch (RemoteException e) {<a name="line.540"></a>
+<span class="sourceLineNo">541</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>        if (shouldRetryCreate(e)) {<a name="line.542"></a>
+<span class="sourceLineNo">543</span>          if (retry &gt;= createMaxRetries) {<a name="line.543"></a>
+<span class="sourceLineNo">544</span>            throw e.unwrapRemoteException();<a name="line.544"></a>
+<span class="sourceLineNo">545</span>          }<a name="line.545"></a>
+<span class="sourceLineNo">546</span>        } else {<a name="line.546"></a>
+<span class="sourceLineNo">547</span>          throw e.unwrapRemoteException();<a name="line.547"></a>
+<span class="sourceLineNo">548</span>        }<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      } catch (IOException e) {<a name="line.549"></a>
+<span class="sourceLineNo">550</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.550"></a>
+<span class="sourceLineNo">551</span>        if (retry &gt;= createMaxRetries) {<a name="line.551"></a>
+<span class="sourceLineNo">552</span>          throw e;<a name="line.552"></a>
+<span class="sourceLineNo">553</span>        }<a name="line.553"></a>
+<span class="sourceLineNo">554</span>        // overwrite the old broken file.<a name="line.554"></a>
+<span class="sourceLineNo">555</span>        overwrite = true;<a name="line.555"></a>
+<span class="sourceLineNo">556</span>        try {<a name="line.556"></a>
+<span class="sourceLineNo">557</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.557"></a>
+<span class="sourceLineNo">558</span>        } catch (InterruptedException ie) {<a name="line.558"></a>
+<span class="sourceLineNo">559</span>          throw new InterruptedIOException();<a name="line.559"></a>
+<span class="sourceLineNo">560</span>        }<a name="line.560"></a>
+<span class="sourceLineNo">561</span>      } finally {<a name="line.561"></a>
+<span class="sourceLineNo">562</span>        if (!succ) {<a name="line.562"></a>
+<span class="sourceLineNo">563</span>          if (futureList != null) {<a name="line.563"></a>
+<span class="sourceLineNo">564</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.565"></a>
+<span class="sourceLineNo">566</span><a name="line.566"></a>
+<span class="sourceLineNo">567</span>                @Override<a name="line.567"></a>
+<span class="sourceLineNo">568</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.568"></a>
+<span class="sourceLineNo">569</span>                  if (future.isSuccess()) {<a name="line.569"></a>
+<span class="sourceLineNo">570</span>                    future.getNow().close();<a name="line.570"></a>
+<span class="sourceLineNo">571</span>                  }<a name="line.571"></a>
+<span class="sourceLineNo">572</span>                }<a name="line.572"></a>
+<span class="sourceLineNo">573</span>              });<a name="line.573"></a>
+<span class="sourceLineNo">574</span>            }<a name="line.574"></a>
+<span class="sourceLineNo">575</span>          }<a name="line.575"></a>
+<span class="sourceLineNo">576</span>          endFileLease(client, stat.getFileId());<a name="line.576"></a>
+<span class="sourceLineNo">577</span>        }<a name="line.577"></a>
 <span class="sourceLineNo">578</span>      }<a name="line.578"></a>
-<span class="sourceLineNo">579</span><a name="line.579"></a>
-<span class="sourceLineNo">580</span>      @Override<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>        throw new UnsupportedOperationException();<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      }<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }.resolve(dfs, f);<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.589"></a>
-<span class="sourceLineNo">590</span>    // DFSOutputStream.newStreamForCreate.<a name="line.590"></a>
-<span class="sourceLineNo">591</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.591"></a>
-<span class="sourceLineNo">592</span>  }<a name="line.592"></a>
-<span class="sourceLineNo">593</span><a name="line.593"></a>
-<span class="sourceLineNo">594</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      ExtendedBlock block, long fileId) {<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    for (int retry = 0;; retry++) {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      try {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>          endFileLease(client, fileId);<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          return;<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } else {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      } catch (RemoteException e) {<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        IOException ioe = e.unwrapRemoteException();<a name="line.605"></a>
-<span class="sourceLineNo">606</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>          return;<a name="line.608"></a>
-<span class="sourceLineNo">609</span>        } else {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        }<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      } catch (Exception e) {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      }<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      sleepIgnoreInterrupt(retry);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    }<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  }<a name="line.617"></a>
-<span class="sourceLineNo">618</span><a name="line.618"></a>
-<span class="sourceLineNo">619</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    try {<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    } catch (InterruptedException e) {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    }<a name="line.623"></a>
-<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
-<span class="sourceLineNo">625</span>}<a name="line.625"></a>
+<span class="sourceLineNo">579</span>    }<a name="line.579"></a>
+<span class="sourceLineNo">580</span>  }<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>  /**<a name="line.582"></a>
+<span class="sourceLineNo">583</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.583"></a>
+<span class="sourceLineNo">584</span>   * inside an {@link EventLoop}.<a name="line.584"></a>
+<span class="sourceLineNo">585</span>   */<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.587"></a>
+<span class="sourceLineNo">588</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.588"></a>
+<span class="sourceLineNo">589</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.589"></a>
+<span class="sourceLineNo">590</span><a name="line.590"></a>
+<span class="sourceLineNo">591</span>      @Override<a name="line.591"></a>
+<span class="sourceLineNo">592</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.592"></a>
+<span class="sourceLineNo">593</span>          throws IOException, UnresolvedLinkException {<a name="line.593"></a>
+<span class="sourceLineNo">594</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.594"></a>
+<span class="sourceLineNo">595</span>          blockSize, eventLoopGroup, channelClass);<a name="line.595"></a>
+<span class="sourceLineNo">596</span>      }<a name="line.596"></a>
+<span class="sourceLineNo">597</span><a name="line.597"></a>
+<span class="sourceLineNo">598</span>      @Override<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.599"></a>
+<span class="sourceLineNo">600</span>        throw new UnsupportedOperationException();<a name="line.600"></a>
+<span class="sourceLineNo">601</span>      }<a name="line.601"></a>
+<span class="sourceLineNo">602</span>    }.resolve(dfs, f);<a name="line.602"></a>
+<span class="sourceLineNo">603</span>  }<a name="line.603"></a>
+<span class="sourceLineNo">604</span><a name="line.604"></a>
+<span class="sourceLineNo">605</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.607"></a>
+<span class="sourceLineNo">608</span>    // DFSOutputStream.newStreamForCreate.<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.609"></a>
+<span class="sourceLineNo">610</span>  }<a name="line.610"></a>
+<span class="sourceLineNo">611</span><a name="line.611"></a>
+<span class="sourceLineNo">612</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.612"></a>
+<span class="sourceLineNo">613</span>      ExtendedBlock block, long fileId) {<a name="line.613"></a>
+<span class="sourceLineNo">614</span>    for (int retry = 0;; retry++) {<a name="line.614"></a>
+<span class="sourceLineNo">615</span>      try {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.616"></a>
+<span class="sourceLineNo">617</span>          endFileLease(client, fileId);<a name="line.617"></a>
+<span class="sourceLineNo">618</span>          return;<a name="line.618"></a>
+<span class="sourceLineNo">619</span>        } else {<a name="line.619"></a>
+<span class="sourceLineNo">620</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.620"></a>
+<span class="sourceLineNo">621</span>        }<a name="line.621"></a>
+<span class="sourceLineNo">622</span>      } catch (RemoteException e) {<a name="line.622"></a>
+<span class="sourceLineNo">623</span>        IOException ioe = e.unwrapRemoteException();<a name="line.623"></a>
+<span class="sourceLineNo">624</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.624"></a>
+<span class="sourceLineNo">625</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.625"></a>
+<span class="sourceLineNo">626</span>          return;<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        } else {<a name="line.627"></a>
+<span class="sourceLineNo">628</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.628"></a>
+<span class="sourceLineNo">629</span>        }<a name="line.629"></a>
+<span class="sourceLineNo">630</span>      } catch (Exception e) {<a name="line.630"></a>
+<span class="sourceLineNo">631</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      }<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      sleepIgnoreInterrupt(retry);<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
+<span class="sourceLineNo">635</span>  }<a name="line.635"></a>
+<span class="sourceLineNo">636</span><a name="line.636"></a>
+<span class="sourceLineNo">637</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.637"></a>
+<span class="sourceLineNo">638</span>    try {<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.639"></a>
+<span class="sourceLineNo">640</span>    } catch (InterruptedException e) {<a name="line.640"></a>
+<span class="sourceLineNo">641</span>    }<a name="line.641"></a>
+<span class="sourceLineNo">642</span>  }<a name="line.642"></a>
+<span class="sourceLineNo">643</span>}<a name="line.643"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html
index 6684af5..3556576 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.FileCreator.html
@@ -229,408 +229,426 @@
 <span class="sourceLineNo">221</span>    };<a name="line.221"></a>
 <span class="sourceLineNo">222</span>  }<a name="line.222"></a>
 <span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.224"></a>
+<span class="sourceLineNo">224</span>  private static FileCreator createFileCreator3_3() throws NoSuchMethodException {<a name="line.224"></a>
 <span class="sourceLineNo">225</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.226"></a>
-<span class="sourceLineNo">227</span>      CryptoProtocolVersion[].class, String.class);<a name="line.227"></a>
+<span class="sourceLineNo">226</span>        String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        CryptoProtocolVersion[].class, String.class, String.class);<a name="line.227"></a>
 <span class="sourceLineNo">228</span><a name="line.228"></a>
 <span class="sourceLineNo">229</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.229"></a>
 <span class="sourceLineNo">230</span>        supportedVersions) -&gt; {<a name="line.230"></a>
 <span class="sourceLineNo">231</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.231"></a>
-<span class="sourceLineNo">232</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.232"></a>
+<span class="sourceLineNo">232</span>          createParent, replication, blockSize, supportedVersions, null, null);<a name="line.232"></a>
 <span class="sourceLineNo">233</span>    };<a name="line.233"></a>
 <span class="sourceLineNo">234</span>  }<a name="line.234"></a>
 <span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.236"></a>
+<span class="sourceLineNo">236</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.236"></a>
 <span class="sourceLineNo">237</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.237"></a>
 <span class="sourceLineNo">238</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      CryptoProtocolVersion[].class);<a name="line.239"></a>
+<span class="sourceLineNo">239</span>      CryptoProtocolVersion[].class, String.class);<a name="line.239"></a>
 <span class="sourceLineNo">240</span><a name="line.240"></a>
 <span class="sourceLineNo">241</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.241"></a>
 <span class="sourceLineNo">242</span>        supportedVersions) -&gt; {<a name="line.242"></a>
 <span class="sourceLineNo">243</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        createParent, replication, blockSize, supportedVersions);<a name="line.244"></a>
+<span class="sourceLineNo">244</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.244"></a>
 <span class="sourceLineNo">245</span>    };<a name="line.245"></a>
 <span class="sourceLineNo">246</span>  }<a name="line.246"></a>
 <span class="sourceLineNo">247</span><a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    try {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return createFileCreator3();<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    } catch (NoSuchMethodException e) {<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    }<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    return createFileCreator2();<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  }<a name="line.255"></a>
-<span class="sourceLineNo">256</span><a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // cancel the processing if DFSClient is already closed.<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.258"></a>
+<span class="sourceLineNo">248</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      CryptoProtocolVersion[].class);<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        supportedVersions) -&gt; {<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.255"></a>
+<span class="sourceLineNo">256</span>        createParent, replication, blockSize, supportedVersions);<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    };<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  }<a name="line.258"></a>
 <span class="sourceLineNo">259</span><a name="line.259"></a>
-<span class="sourceLineNo">260</span>    private final DFSClient client;<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    public CancelOnClose(DFSClient client) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      this.client = client;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
-<span class="sourceLineNo">265</span><a name="line.265"></a>
-<span class="sourceLineNo">266</span>    @Override<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    public boolean progress() {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  static {<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    try {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      LEASE_MANAGER = createLeaseManager();<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      FILE_CREATOR = createFileCreator();<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    } catch (Exception e) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.278"></a>
-<span class="sourceLineNo">279</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          "HBASE-16110 for more information.";<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      LOG.error(msg, e);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      throw new Error(msg, e);<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
-<span class="sourceLineNo">285</span><a name="line.285"></a>
-<span class="sourceLineNo">286</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.287"></a>
+<span class="sourceLineNo">260</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    try {<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      return createFileCreator3_3();<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    } catch (NoSuchMethodException e) {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 3.2 or below");<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    }<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>    try {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      return createFileCreator3();<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    } catch (NoSuchMethodException e) {<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    }<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    return createFileCreator2();<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  }<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>  // cancel the processing if DFSClient is already closed.<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.276"></a>
+<span class="sourceLineNo">277</span><a name="line.277"></a>
+<span class="sourceLineNo">278</span>    private final DFSClient client;<a name="line.278"></a>
+<span class="sourceLineNo">279</span><a name="line.279"></a>
+<span class="sourceLineNo">280</span>    public CancelOnClose(DFSClient client) {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      this.client = client;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    public boolean progress() {<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
 <span class="sourceLineNo">288</span>  }<a name="line.288"></a>
 <span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  }<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    return client.getConf().createChecksum(null);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  }<a name="line.296"></a>
-<span class="sourceLineNo">297</span><a name="line.297"></a>
-<span class="sourceLineNo">298</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    List&lt;Integer&gt; flagList = ack.getFlagList();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    Integer headerFlag;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    if (flagList.isEmpty()) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      Status reply = ack.getReply(0);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      headerFlag = PipelineAck.combineHeader(ECN.DISABLED, reply);<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    } else {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      headerFlag = flagList.get(0);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    return PipelineAck.getStatusFromHeader(headerFlag);<a name="line.307"></a>
-<span class="sourceLineNo">308</span>  }<a name="line.308"></a>
-<span class="sourceLineNo">309</span><a name="line.309"></a>
-<span class="sourceLineNo">310</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      new ProtobufVarint32FrameDecoder(),<a name="line.313"></a>
-<span class="sourceLineNo">314</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>        @Override<a name="line.317"></a>
-<span class="sourceLineNo">318</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.318"></a>
-<span class="sourceLineNo">319</span>            throws Exception {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>          Status pipelineStatus = resp.getStatus();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.322"></a>
-<span class="sourceLineNo">323</span>          }<a name="line.323"></a>
-<span class="sourceLineNo">324</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.324"></a>
-<span class="sourceLineNo">325</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.327"></a>
-<span class="sourceLineNo">328</span>                  resp.getMessage() + ", " + logInfo);<a name="line.328"></a>
-<span class="sourceLineNo">329</span>            } else {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.330"></a>
-<span class="sourceLineNo">331</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>            }<a name="line.332"></a>
-<span class="sourceLineNo">333</span>          }<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // success<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          ChannelPipeline p = ctx.pipeline();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.337"></a>
-<span class="sourceLineNo">338</span>            // of pipeline.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>            if (handler instanceof IdleStateHandler) {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>              break;<a name="line.340"></a>
-<span class="sourceLineNo">341</span>            }<a name="line.341"></a>
-<span class="sourceLineNo">342</span>          }<a name="line.342"></a>
-<span class="sourceLineNo">343</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.343"></a>
-<span class="sourceLineNo">344</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>          ctx.channel().config().setAutoRead(false);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>          promise.trySuccess(ctx.channel());<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
-<span class="sourceLineNo">348</span><a name="line.348"></a>
-<span class="sourceLineNo">349</span>        @Override<a name="line.349"></a>
-<span class="sourceLineNo">350</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.351"></a>
-<span class="sourceLineNo">352</span>        }<a name="line.352"></a>
-<span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>        @Override<a name="line.354"></a>
-<span class="sourceLineNo">355</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.356"></a>
-<span class="sourceLineNo">357</span>            promise<a name="line.357"></a>
-<span class="sourceLineNo">358</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>          } else {<a name="line.359"></a>
-<span class="sourceLineNo">360</span>            super.userEventTriggered(ctx, evt);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>          }<a name="line.361"></a>
-<span class="sourceLineNo">362</span>        }<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>        @Override<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>          promise.tryFailure(cause);<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        }<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      });<a name="line.368"></a>
-<span class="sourceLineNo">369</span>  }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>  private static void requestWriteBlock(Channel channel, StorageType storageType,<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    OpWriteBlockProto proto =<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    int protoLen = proto.getSerializedSize();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    ByteBuf buffer =<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    channel.writeAndFlush(buffer);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  }<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      StorageType storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      throws IOException {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>      @Override<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        if (future.isSuccess()) {<a name="line.394"></a>
-<span class="sourceLineNo">395</span>          // setup response processing pipeline first, then send request.<a name="line.395"></a>
-<span class="sourceLineNo">396</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.396"></a>
-<span class="sourceLineNo">397</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        } else {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>          promise.tryFailure(future.cause());<a name="line.399"></a>
-<span class="sourceLineNo">400</span>        }<a name="line.400"></a>
-<span class="sourceLineNo">401</span>      }<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    });<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
-<span class="sourceLineNo">404</span><a name="line.404"></a>
-<span class="sourceLineNo">405</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.407"></a>
-<span class="sourceLineNo">408</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    StorageType[] storageTypes = locatedBlock.getStorageTypes();<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    boolean connectToDnViaHostname =<a name="line.411"></a>
-<span class="sourceLineNo">412</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PBHelperClient.convert(blockCopy))<a name="line.417"></a>
-<span class="sourceLineNo">418</span>        .setToken(PBHelperClient.convert(locatedBlock.getBlockToken())))<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      .setClientName(clientName).build();<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.423"></a>
-<span class="sourceLineNo">424</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.424"></a>
-<span class="sourceLineNo">425</span>        .setRequestedChecksum(checksumProto)<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      StorageType storageType = storageTypes[i];<a name="line.430"></a>
-<span class="sourceLineNo">431</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      futureList.add(promise);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>            @Override<a name="line.437"></a>
-<span class="sourceLineNo">438</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.438"></a>
-<span class="sourceLineNo">439</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.439"></a>
-<span class="sourceLineNo">440</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.440"></a>
-<span class="sourceLineNo">441</span>              // a null handler.<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            }<a name="line.442"></a>
-<span class="sourceLineNo">443</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>            @Override<a name="line.445"></a>
-<span class="sourceLineNo">446</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>              if (future.isSuccess()) {<a name="line.447"></a>
-<span class="sourceLineNo">448</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.448"></a>
-<span class="sourceLineNo">449</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.449"></a>
-<span class="sourceLineNo">450</span>              } else {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>                promise.tryFailure(future.cause());<a name="line.451"></a>
-<span class="sourceLineNo">452</span>              }<a name="line.452"></a>
-<span class="sourceLineNo">453</span>            }<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          });<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    }<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    return futureList;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>  }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  public static class NameNodeException extends IOException {<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>    public NameNodeException(Throwable cause) {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      super(cause);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    }<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
-<span class="sourceLineNo">470</span><a name="line.470"></a>
-<span class="sourceLineNo">471</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.471"></a>
-<span class="sourceLineNo">472</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.472"></a>
-<span class="sourceLineNo">473</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    Configuration conf = dfs.getConf();<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    DFSClient client = dfs.getClient();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    String clientName = client.getClientName();<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    ClientProtocol namenode = client.getNamenode();<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.479"></a>
-<span class="sourceLineNo">480</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    for (int retry = 0;; retry++) {<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      HdfsFileStatus stat;<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.486"></a>
-<span class="sourceLineNo">487</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      } catch (Exception e) {<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        if (e instanceof RemoteException) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>          throw (RemoteException) e;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        } else {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>          throw new NameNodeException(e);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        }<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      }<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      beginFileLease(client, stat.getFileId());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      boolean succ = false;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      LocatedBlock locatedBlock = null;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      try {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>        DataChecksum summer = createChecksum(client);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        locatedBlock = namenode.addBlock(src, client.getClientName(), null, excludesNodes,<a name="line.502"></a>
-<span class="sourceLineNo">503</span>          stat.getFileId(), null, null);<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          try {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          } catch (Exception e) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>            // exclude the broken DN next time<a name="line.511"></a>
-<span class="sourceLineNo">512</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.512"></a>
-<span class="sourceLineNo">513</span>            throw e;<a name="line.513"></a>
-<span class="sourceLineNo">514</span>          }<a name="line.514"></a>
-<span class="sourceLineNo">515</span>        }<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.516"></a>
-<span class="sourceLineNo">517</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.518"></a>
-<span class="sourceLineNo">519</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.519"></a>
-<span class="sourceLineNo">520</span>        succ = true;<a name="line.520"></a>
-<span class="sourceLineNo">521</span>        return output;<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      } catch (RemoteException e) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.523"></a>
-<span class="sourceLineNo">524</span>        if (shouldRetryCreate(e)) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          if (retry &gt;= createMaxRetries) {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>            throw e.unwrapRemoteException();<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          }<a name="line.527"></a>
-<span class="sourceLineNo">528</span>        } else {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          throw e.unwrapRemoteException();<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      } catch (IOException e) {<a name="line.531"></a>
-<span class="sourceLineNo">532</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>        if (retry &gt;= createMaxRetries) {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          throw e;<a name="line.534"></a>
-<span class="sourceLineNo">535</span>        }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>        // overwrite the old broken file.<a name="line.536"></a>
-<span class="sourceLineNo">537</span>        overwrite = true;<a name="line.537"></a>
-<span class="sourceLineNo">538</span>        try {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.539"></a>
-<span class="sourceLineNo">540</span>        } catch (InterruptedException ie) {<a name="line.540"></a>
-<span class="sourceLineNo">541</span>          throw new InterruptedIOException();<a name="line.541"></a>
-<span class="sourceLineNo">542</span>        }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      } finally {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>        if (!succ) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>          if (futureList != null) {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>                @Override<a name="line.549"></a>
-<span class="sourceLineNo">550</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.550"></a>
-<span class="sourceLineNo">551</span>                  if (future.isSuccess()) {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>                    future.getNow().close();<a name="line.552"></a>
-<span class="sourceLineNo">553</span>                  }<a name="line.553"></a>
-<span class="sourceLineNo">554</span>                }<a name="line.554"></a>
-<span class="sourceLineNo">555</span>              });<a name="line.555"></a>
-<span class="sourceLineNo">556</span>            }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          }<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          endFileLease(client, stat.getFileId());<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * inside an {@link EventLoop}.<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.569"></a>
-<span class="sourceLineNo">570</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.571"></a>
-<span class="sourceLineNo">572</span><a name="line.572"></a>
-<span class="sourceLineNo">573</span>      @Override<a name="line.573"></a>
-<span class="sourceLineNo">574</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.574"></a>
-<span class="sourceLineNo">575</span>          throws IOException, UnresolvedLinkException {<a name="line.575"></a>
-<span class="sourceLineNo">576</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.576"></a>
-<span class="sourceLineNo">577</span>          blockSize, eventLoopGroup, channelClass);<a name="line.577"></a>
+<span class="sourceLineNo">290</span>  static {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    try {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      LEASE_MANAGER = createLeaseManager();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.293"></a>
+<span class="sourceLineNo">294</span>      FILE_CREATOR = createFileCreator();<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    } catch (Exception e) {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.297"></a>
+<span class="sourceLineNo">298</span>          "HBASE-16110 for more information.";<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      LOG.error(msg, e);<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      throw new Error(msg, e);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    }<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
+<span class="sourceLineNo">303</span><a name="line.303"></a>
+<span class="sourceLineNo">304</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  }<a name="line.306"></a>
+<span class="sourceLineNo">307</span><a name="line.307"></a>
+<span class="sourceLineNo">308</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    return client.getConf().createChecksum(null);<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  }<a name="line.314"></a>
+<span class="sourceLineNo">315</span><a name="line.315"></a>
+<span class="sourceLineNo">316</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    List&lt;Integer&gt; flagList = ack.getFlagList();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    Integer headerFlag;<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    if (flagList.isEmpty()) {<a name="line.319"></a>
+<span class="sourceLineNo">320</span>      Status reply = ack.getReply(0);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      headerFlag = PipelineAck.combineHeader(ECN.DISABLED, reply);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    } else {<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      headerFlag = flagList.get(0);<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    }<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    return PipelineAck.getStatusFromHeader(headerFlag);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.328"></a>
+<span class="sourceLineNo">329</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.330"></a>
+<span class="sourceLineNo">331</span>      new ProtobufVarint32FrameDecoder(),<a name="line.331"></a>
+<span class="sourceLineNo">332</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.333"></a>
+<span class="sourceLineNo">334</span><a name="line.334"></a>
+<span class="sourceLineNo">335</span>        @Override<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.336"></a>
+<span class="sourceLineNo">337</span>            throws Exception {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          Status pipelineStatus = resp.getStatus();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.339"></a>
+<span class="sourceLineNo">340</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.340"></a>
+<span class="sourceLineNo">341</span>          }<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.343"></a>
+<span class="sourceLineNo">344</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.344"></a>
+<span class="sourceLineNo">345</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.345"></a>
+<span class="sourceLineNo">346</span>                  resp.getMessage() + ", " + logInfo);<a name="line.346"></a>
+<span class="sourceLineNo">347</span>            } else {<a name="line.347"></a>
+<span class="sourceLineNo">348</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.348"></a>
+<span class="sourceLineNo">349</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.349"></a>
+<span class="sourceLineNo">350</span>            }<a name="line.350"></a>
+<span class="sourceLineNo">351</span>          }<a name="line.351"></a>
+<span class="sourceLineNo">352</span>          // success<a name="line.352"></a>
+<span class="sourceLineNo">353</span>          ChannelPipeline p = ctx.pipeline();<a name="line.353"></a>
+<span class="sourceLineNo">354</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.354"></a>
+<span class="sourceLineNo">355</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.355"></a>
+<span class="sourceLineNo">356</span>            // of pipeline.<a name="line.356"></a>
+<span class="sourceLineNo">357</span>            if (handler instanceof IdleStateHandler) {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>              break;<a name="line.358"></a>
+<span class="sourceLineNo">359</span>            }<a name="line.359"></a>
+<span class="sourceLineNo">360</span>          }<a name="line.360"></a>
+<span class="sourceLineNo">361</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.361"></a>
+<span class="sourceLineNo">362</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.362"></a>
+<span class="sourceLineNo">363</span>          ctx.channel().config().setAutoRead(false);<a name="line.363"></a>
+<span class="sourceLineNo">364</span>          promise.trySuccess(ctx.channel());<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
+<span class="sourceLineNo">366</span><a name="line.366"></a>
+<span class="sourceLineNo">367</span>        @Override<a name="line.367"></a>
+<span class="sourceLineNo">368</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.369"></a>
+<span class="sourceLineNo">370</span>        }<a name="line.370"></a>
+<span class="sourceLineNo">371</span><a name="line.371"></a>
+<span class="sourceLineNo">372</span>        @Override<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.373"></a>
+<span class="sourceLineNo">374</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>            promise<a name="line.375"></a>
+<span class="sourceLineNo">376</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.376"></a>
+<span class="sourceLineNo">377</span>          } else {<a name="line.377"></a>
+<span class="sourceLineNo">378</span>            super.userEventTriggered(ctx, evt);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>          }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>        }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>        @Override<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          promise.tryFailure(cause);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>        }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      });<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  }<a name="line.387"></a>
+<span class="sourceLineNo">388</span><a name="line.388"></a>
+<span class="sourceLineNo">389</span>  private static void requestWriteBlock(Channel channel, StorageType storageType,<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    OpWriteBlockProto proto =<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();<a name="line.392"></a>
+<span class="sourceLineNo">393</span>    int protoLen = proto.getSerializedSize();<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    ByteBuf buffer =<a name="line.394"></a>
+<span class="sourceLineNo">395</span>      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.397"></a>
+<span class="sourceLineNo">398</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    channel.writeAndFlush(buffer);<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  }<a name="line.400"></a>
+<span class="sourceLineNo">401</span><a name="line.401"></a>
+<span class="sourceLineNo">402</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.402"></a>
+<span class="sourceLineNo">403</span>      StorageType storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.403"></a>
+<span class="sourceLineNo">404</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.404"></a>
+<span class="sourceLineNo">405</span>      throws IOException {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.406"></a>
+<span class="sourceLineNo">407</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.407"></a>
+<span class="sourceLineNo">408</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.408"></a>
+<span class="sourceLineNo">409</span><a name="line.409"></a>
+<span class="sourceLineNo">410</span>      @Override<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>        if (future.isSuccess()) {<a name="line.412"></a>
+<span class="sourceLineNo">413</span>          // setup response processing pipeline first, then send request.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.415"></a>
+<span class="sourceLineNo">416</span>        } else {<a name="line.416"></a>
+<span class="sourceLineNo">417</span>          promise.tryFailure(future.cause());<a name="line.417"></a>
+<span class="sourceLineNo">418</span>        }<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      }<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    });<a name="line.420"></a>
+<span class="sourceLineNo">421</span>  }<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.425"></a>
+<span class="sourceLineNo">426</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    StorageType[] storageTypes = locatedBlock.getStorageTypes();<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.428"></a>
+<span class="sourceLineNo">429</span>    boolean connectToDnViaHostname =<a name="line.429"></a>
+<span class="sourceLineNo">430</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.432"></a>
+<span class="sourceLineNo">433</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.434"></a>
+<span class="sourceLineNo">435</span>      .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PBHelperClient.convert(blockCopy))<a name="line.435"></a>
+<span class="sourceLineNo">436</span>        .setToken(PBHelperClient.convert(locatedBlock.getBlockToken())))<a name="line.436"></a>
+<span class="sourceLineNo">437</span>      .setClientName(clientName).build();<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.438"></a>
+<span class="sourceLineNo">439</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        .setRequestedChecksum(checksumProto)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.445"></a>
+<span class="sourceLineNo">446</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.447"></a>
+<span class="sourceLineNo">448</span>      StorageType storageType = storageTypes[i];<a name="line.448"></a>
+<span class="sourceLineNo">449</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>      futureList.add(promise);<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.453"></a>
+<span class="sourceLineNo">454</span><a name="line.454"></a>
+<span class="sourceLineNo">455</span>            @Override<a name="line.455"></a>
+<span class="sourceLineNo">456</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.456"></a>
+<span class="sourceLineNo">457</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.457"></a>
+<span class="sourceLineNo">458</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.458"></a>
+<span class="sourceLineNo">459</span>              // a null handler.<a name="line.459"></a>
+<span class="sourceLineNo">460</span>            }<a name="line.460"></a>
+<span class="sourceLineNo">461</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.461"></a>
+<span class="sourceLineNo">462</span><a name="line.462"></a>
+<span class="sourceLineNo">463</span>            @Override<a name="line.463"></a>
+<span class="sourceLineNo">464</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>              if (future.isSuccess()) {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.466"></a>
+<span class="sourceLineNo">467</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.467"></a>
+<span class="sourceLineNo">468</span>              } else {<a name="line.468"></a>
+<span class="sourceLineNo">469</span>                promise.tryFailure(future.cause());<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              }<a name="line.470"></a>
+<span class="sourceLineNo">471</span>            }<a name="line.471"></a>
+<span class="sourceLineNo">472</span>          });<a name="line.472"></a>
+<span class="sourceLineNo">473</span>    }<a name="line.473"></a>
+<span class="sourceLineNo">474</span>    return futureList;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>  }<a name="line.475"></a>
+<span class="sourceLineNo">476</span><a name="line.476"></a>
+<span class="sourceLineNo">477</span>  /**<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   */<a name="line.479"></a>
+<span class="sourceLineNo">480</span>  public static class NameNodeException extends IOException {<a name="line.480"></a>
+<span class="sourceLineNo">481</span><a name="line.481"></a>
+<span class="sourceLineNo">482</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>    public NameNodeException(Throwable cause) {<a name="line.484"></a>
+<span class="sourceLineNo">485</span>      super(cause);<a name="line.485"></a>
+<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
+<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
+<span class="sourceLineNo">488</span><a name="line.488"></a>
+<span class="sourceLineNo">489</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.490"></a>
+<span class="sourceLineNo">491</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    Configuration conf = dfs.getConf();<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    DFSClient client = dfs.getClient();<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    String clientName = client.getClientName();<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    ClientProtocol namenode = client.getNamenode();<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    for (int retry = 0;; retry++) {<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      HdfsFileStatus stat;<a name="line.501"></a>
+<span class="sourceLineNo">502</span>      try {<a name="line.502"></a>
+<span class="sourceLineNo">503</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.503"></a>
+<span class="sourceLineNo">504</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.504"></a>
+<span class="sourceLineNo">505</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.505"></a>
+<span class="sourceLineNo">506</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.506"></a>
+<span class="sourceLineNo">507</span>      } catch (Exception e) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>        if (e instanceof RemoteException) {<a name="line.508"></a>
+<span class="sourceLineNo">509</span>          throw (RemoteException) e;<a name="line.509"></a>
+<span class="sourceLineNo">510</span>        } else {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>          throw new NameNodeException(e);<a name="line.511"></a>
+<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      }<a name="line.513"></a>
+<span class="sourceLineNo">514</span>      beginFileLease(client, stat.getFileId());<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      boolean succ = false;<a name="line.515"></a>
+<span class="sourceLineNo">516</span>      LocatedBlock locatedBlock = null;<a name="line.516"></a>
+<span class="sourceLineNo">517</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.517"></a>
+<span class="sourceLineNo">518</span>      try {<a name="line.518"></a>
+<span class="sourceLineNo">519</span>        DataChecksum summer = createChecksum(client);<a name="line.519"></a>
+<span class="sourceLineNo">520</span>        locatedBlock = namenode.addBlock(src, client.getClientName(), null, excludesNodes,<a name="line.520"></a>
+<span class="sourceLineNo">521</span>          stat.getFileId(), null, null);<a name="line.521"></a>
+<span class="sourceLineNo">522</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.523"></a>
+<span class="sourceLineNo">524</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.524"></a>
+<span class="sourceLineNo">525</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          try {<a name="line.526"></a>
+<span class="sourceLineNo">527</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.527"></a>
+<span class="sourceLineNo">528</span>          } catch (Exception e) {<a name="line.528"></a>
+<span class="sourceLineNo">529</span>            // exclude the broken DN next time<a name="line.529"></a>
+<span class="sourceLineNo">530</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.530"></a>
+<span class="sourceLineNo">531</span>            throw e;<a name="line.531"></a>
+<span class="sourceLineNo">532</span>          }<a name="line.532"></a>
+<span class="sourceLineNo">533</span>        }<a name="line.533"></a>
+<span class="sourceLineNo">534</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.534"></a>
+<span class="sourceLineNo">535</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.535"></a>
+<span class="sourceLineNo">536</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.536"></a>
+<span class="sourceLineNo">537</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        succ = true;<a name="line.538"></a>
+<span class="sourceLineNo">539</span>        return output;<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      } catch (RemoteException e) {<a name="line.540"></a>
+<span class="sourceLineNo">541</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>        if (shouldRetryCreate(e)) {<a name="line.542"></a>
+<span class="sourceLineNo">543</span>          if (retry &gt;= createMaxRetries) {<a name="line.543"></a>
+<span class="sourceLineNo">544</span>            throw e.unwrapRemoteException();<a name="line.544"></a>
+<span class="sourceLineNo">545</span>          }<a name="line.545"></a>
+<span class="sourceLineNo">546</span>        } else {<a name="line.546"></a>
+<span class="sourceLineNo">547</span>          throw e.unwrapRemoteException();<a name="line.547"></a>
+<span class="sourceLineNo">548</span>        }<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      } catch (IOException e) {<a name="line.549"></a>
+<span class="sourceLineNo">550</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.550"></a>
+<span class="sourceLineNo">551</span>        if (retry &gt;= createMaxRetries) {<a name="line.551"></a>
+<span class="sourceLineNo">552</span>          throw e;<a name="line.552"></a>
+<span class="sourceLineNo">553</span>        }<a name="line.553"></a>
+<span class="sourceLineNo">554</span>        // overwrite the old broken file.<a name="line.554"></a>
+<span class="sourceLineNo">555</span>        overwrite = true;<a name="line.555"></a>
+<span class="sourceLineNo">556</span>        try {<a name="line.556"></a>
+<span class="sourceLineNo">557</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.557"></a>
+<span class="sourceLineNo">558</span>        } catch (InterruptedException ie) {<a name="line.558"></a>
+<span class="sourceLineNo">559</span>          throw new InterruptedIOException();<a name="line.559"></a>
+<span class="sourceLineNo">560</span>        }<a name="line.560"></a>
+<span class="sourceLineNo">561</span>      } finally {<a name="line.561"></a>
+<span class="sourceLineNo">562</span>        if (!succ) {<a name="line.562"></a>
+<span class="sourceLineNo">563</span>          if (futureList != null) {<a name="line.563"></a>
+<span class="sourceLineNo">564</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.565"></a>
+<span class="sourceLineNo">566</span><a name="line.566"></a>
+<span class="sourceLineNo">567</span>                @Override<a name="line.567"></a>
+<span class="sourceLineNo">568</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.568"></a>
+<span class="sourceLineNo">569</span>                  if (future.isSuccess()) {<a name="line.569"></a>
+<span class="sourceLineNo">570</span>                    future.getNow().close();<a name="line.570"></a>
+<span class="sourceLineNo">571</span>                  }<a name="line.571"></a>
+<span class="sourceLineNo">572</span>                }<a name="line.572"></a>
+<span class="sourceLineNo">573</span>              });<a name="line.573"></a>
+<span class="sourceLineNo">574</span>            }<a name="line.574"></a>
+<span class="sourceLineNo">575</span>          }<a name="line.575"></a>
+<span class="sourceLineNo">576</span>          endFileLease(client, stat.getFileId());<a name="line.576"></a>
+<span class="sourceLineNo">577</span>        }<a name="line.577"></a>
 <span class="sourceLineNo">578</span>      }<a name="line.578"></a>
-<span class="sourceLineNo">579</span><a name="line.579"></a>
-<span class="sourceLineNo">580</span>      @Override<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>        throw new UnsupportedOperationException();<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      }<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }.resolve(dfs, f);<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.589"></a>
-<span class="sourceLineNo">590</span>    // DFSOutputStream.newStreamForCreate.<a name="line.590"></a>
-<span class="sourceLineNo">591</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.591"></a>
-<span class="sourceLineNo">592</span>  }<a name="line.592"></a>
-<span class="sourceLineNo">593</span><a name="line.593"></a>
-<span class="sourceLineNo">594</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      ExtendedBlock block, long fileId) {<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    for (int retry = 0;; retry++) {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      try {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>          endFileLease(client, fileId);<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          return;<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } else {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      } catch (RemoteException e) {<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        IOException ioe = e.unwrapRemoteException();<a name="line.605"></a>
-<span class="sourceLineNo">606</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>          return;<a name="line.608"></a>
-<span class="sourceLineNo">609</span>        } else {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        }<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      } catch (Exception e) {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      }<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      sleepIgnoreInterrupt(retry);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    }<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  }<a name="line.617"></a>
-<span class="sourceLineNo">618</span><a name="line.618"></a>
-<span class="sourceLineNo">619</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    try {<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    } catch (InterruptedException e) {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    }<a name="line.623"></a>
-<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
-<span class="sourceLineNo">625</span>}<a name="line.625"></a>
+<span class="sourceLineNo">579</span>    }<a name="line.579"></a>
+<span class="sourceLineNo">580</span>  }<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>  /**<a name="line.582"></a>
+<span class="sourceLineNo">583</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.583"></a>
+<span class="sourceLineNo">584</span>   * inside an {@link EventLoop}.<a name="line.584"></a>
+<span class="sourceLineNo">585</span>   */<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.587"></a>
+<span class="sourceLineNo">588</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.588"></a>
+<span class="sourceLineNo">589</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.589"></a>
+<span class="sourceLineNo">590</span><a name="line.590"></a>
+<span class="sourceLineNo">591</span>      @Override<a name="line.591"></a>
+<span class="sourceLineNo">592</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.592"></a>
+<span class="sourceLineNo">593</span>          throws IOException, UnresolvedLinkException {<a name="line.593"></a>
+<span class="sourceLineNo">594</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.594"></a>
+<span class="sourceLineNo">595</span>          blockSize, eventLoopGroup, channelClass);<a name="line.595"></a>
+<span class="sourceLineNo">596</span>      }<a name="line.596"></a>
+<span class="sourceLineNo">597</span><a name="line.597"></a>
+<span class="sourceLineNo">598</span>      @Override<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.599"></a>
+<span class="sourceLineNo">600</span>        throw new UnsupportedOperationException();<a name="line.600"></a>
+<span class="sourceLineNo">601</span>      }<a name="line.601"></a>
+<span class="sourceLineNo">602</span>    }.resolve(dfs, f);<a name="line.602"></a>
+<span class="sourceLineNo">603</span>  }<a name="line.603"></a>
+<span class="sourceLineNo">604</span><a name="line.604"></a>
+<span class="sourceLineNo">605</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.607"></a>
+<span class="sourceLineNo">608</span>    // DFSOutputStream.newStreamForCreate.<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.609"></a>
+<span class="sourceLineNo">610</span>  }<a name="line.610"></a>
+<span class="sourceLineNo">611</span><a name="line.611"></a>
+<span class="sourceLineNo">612</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.612"></a>
+<span class="sourceLineNo">613</span>      ExtendedBlock block, long fileId) {<a name="line.613"></a>
+<span class="sourceLineNo">614</span>    for (int retry = 0;; retry++) {<a name="line.614"></a>
+<span class="sourceLineNo">615</span>      try {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.616"></a>
+<span class="sourceLineNo">617</span>          endFileLease(client, fileId);<a name="line.617"></a>
+<span class="sourceLineNo">618</span>          return;<a name="line.618"></a>
+<span class="sourceLineNo">619</span>        } else {<a name="line.619"></a>
+<span class="sourceLineNo">620</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.620"></a>
+<span class="sourceLineNo">621</span>        }<a name="line.621"></a>
+<span class="sourceLineNo">622</span>      } catch (RemoteException e) {<a name="line.622"></a>
+<span class="sourceLineNo">623</span>        IOException ioe = e.unwrapRemoteException();<a name="line.623"></a>
+<span class="sourceLineNo">624</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.624"></a>
+<span class="sourceLineNo">625</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.625"></a>
+<span class="sourceLineNo">626</span>          return;<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        } else {<a name="line.627"></a>
+<span class="sourceLineNo">628</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.628"></a>
+<span class="sourceLineNo">629</span>        }<a name="line.629"></a>
+<span class="sourceLineNo">630</span>      } catch (Exception e) {<a name="line.630"></a>
+<span class="sourceLineNo">631</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      }<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      sleepIgnoreInterrupt(retry);<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
+<span class="sourceLineNo">635</span>  }<a name="line.635"></a>
+<span class="sourceLineNo">636</span><a name="line.636"></a>
+<span class="sourceLineNo">637</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.637"></a>
+<span class="sourceLineNo">638</span>    try {<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.639"></a>
+<span class="sourceLineNo">640</span>    } catch (InterruptedException e) {<a name="line.640"></a>
+<span class="sourceLineNo">641</span>    }<a name="line.641"></a>
+<span class="sourceLineNo">642</span>  }<a name="line.642"></a>
+<span class="sourceLineNo">643</span>}<a name="line.643"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
index 6684af5..3556576 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.LeaseManager.html
@@ -229,408 +229,426 @@
 <span class="sourceLineNo">221</span>    };<a name="line.221"></a>
 <span class="sourceLineNo">222</span>  }<a name="line.222"></a>
 <span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.224"></a>
+<span class="sourceLineNo">224</span>  private static FileCreator createFileCreator3_3() throws NoSuchMethodException {<a name="line.224"></a>
 <span class="sourceLineNo">225</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.226"></a>
-<span class="sourceLineNo">227</span>      CryptoProtocolVersion[].class, String.class);<a name="line.227"></a>
+<span class="sourceLineNo">226</span>        String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        CryptoProtocolVersion[].class, String.class, String.class);<a name="line.227"></a>
 <span class="sourceLineNo">228</span><a name="line.228"></a>
 <span class="sourceLineNo">229</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.229"></a>
 <span class="sourceLineNo">230</span>        supportedVersions) -&gt; {<a name="line.230"></a>
 <span class="sourceLineNo">231</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.231"></a>
-<span class="sourceLineNo">232</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.232"></a>
+<span class="sourceLineNo">232</span>          createParent, replication, blockSize, supportedVersions, null, null);<a name="line.232"></a>
 <span class="sourceLineNo">233</span>    };<a name="line.233"></a>
 <span class="sourceLineNo">234</span>  }<a name="line.234"></a>
 <span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.236"></a>
+<span class="sourceLineNo">236</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.236"></a>
 <span class="sourceLineNo">237</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.237"></a>
 <span class="sourceLineNo">238</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      CryptoProtocolVersion[].class);<a name="line.239"></a>
+<span class="sourceLineNo">239</span>      CryptoProtocolVersion[].class, String.class);<a name="line.239"></a>
 <span class="sourceLineNo">240</span><a name="line.240"></a>
 <span class="sourceLineNo">241</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.241"></a>
 <span class="sourceLineNo">242</span>        supportedVersions) -&gt; {<a name="line.242"></a>
 <span class="sourceLineNo">243</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        createParent, replication, blockSize, supportedVersions);<a name="line.244"></a>
+<span class="sourceLineNo">244</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.244"></a>
 <span class="sourceLineNo">245</span>    };<a name="line.245"></a>
 <span class="sourceLineNo">246</span>  }<a name="line.246"></a>
 <span class="sourceLineNo">247</span><a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    try {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return createFileCreator3();<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    } catch (NoSuchMethodException e) {<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    }<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    return createFileCreator2();<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  }<a name="line.255"></a>
-<span class="sourceLineNo">256</span><a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // cancel the processing if DFSClient is already closed.<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.258"></a>
+<span class="sourceLineNo">248</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      CryptoProtocolVersion[].class);<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        supportedVersions) -&gt; {<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.255"></a>
+<span class="sourceLineNo">256</span>        createParent, replication, blockSize, supportedVersions);<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    };<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  }<a name="line.258"></a>
 <span class="sourceLineNo">259</span><a name="line.259"></a>
-<span class="sourceLineNo">260</span>    private final DFSClient client;<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    public CancelOnClose(DFSClient client) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      this.client = client;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
-<span class="sourceLineNo">265</span><a name="line.265"></a>
-<span class="sourceLineNo">266</span>    @Override<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    public boolean progress() {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  static {<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    try {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      LEASE_MANAGER = createLeaseManager();<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      FILE_CREATOR = createFileCreator();<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    } catch (Exception e) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.278"></a>
-<span class="sourceLineNo">279</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          "HBASE-16110 for more information.";<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      LOG.error(msg, e);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      throw new Error(msg, e);<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
-<span class="sourceLineNo">285</span><a name="line.285"></a>
-<span class="sourceLineNo">286</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.287"></a>
+<span class="sourceLineNo">260</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    try {<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      return createFileCreator3_3();<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    } catch (NoSuchMethodException e) {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 3.2 or below");<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    }<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>    try {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      return createFileCreator3();<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    } catch (NoSuchMethodException e) {<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    }<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    return createFileCreator2();<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  }<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>  // cancel the processing if DFSClient is already closed.<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.276"></a>
+<span class="sourceLineNo">277</span><a name="line.277"></a>
+<span class="sourceLineNo">278</span>    private final DFSClient client;<a name="line.278"></a>
+<span class="sourceLineNo">279</span><a name="line.279"></a>
+<span class="sourceLineNo">280</span>    public CancelOnClose(DFSClient client) {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      this.client = client;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    public boolean progress() {<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
 <span class="sourceLineNo">288</span>  }<a name="line.288"></a>
 <span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  }<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    return client.getConf().createChecksum(null);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  }<a name="line.296"></a>
-<span class="sourceLineNo">297</span><a name="line.297"></a>
-<span class="sourceLineNo">298</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    List&lt;Integer&gt; flagList = ack.getFlagList();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    Integer headerFlag;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    if (flagList.isEmpty()) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      Status reply = ack.getReply(0);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      headerFlag = PipelineAck.combineHeader(ECN.DISABLED, reply);<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    } else {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      headerFlag = flagList.get(0);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    return PipelineAck.getStatusFromHeader(headerFlag);<a name="line.307"></a>
-<span class="sourceLineNo">308</span>  }<a name="line.308"></a>
-<span class="sourceLineNo">309</span><a name="line.309"></a>
-<span class="sourceLineNo">310</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      new ProtobufVarint32FrameDecoder(),<a name="line.313"></a>
-<span class="sourceLineNo">314</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>        @Override<a name="line.317"></a>
-<span class="sourceLineNo">318</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.318"></a>
-<span class="sourceLineNo">319</span>            throws Exception {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>          Status pipelineStatus = resp.getStatus();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.322"></a>
-<span class="sourceLineNo">323</span>          }<a name="line.323"></a>
-<span class="sourceLineNo">324</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.324"></a>
-<span class="sourceLineNo">325</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.327"></a>
-<span class="sourceLineNo">328</span>                  resp.getMessage() + ", " + logInfo);<a name="line.328"></a>
-<span class="sourceLineNo">329</span>            } else {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.330"></a>
-<span class="sourceLineNo">331</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>            }<a name="line.332"></a>
-<span class="sourceLineNo">333</span>          }<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // success<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          ChannelPipeline p = ctx.pipeline();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.337"></a>
-<span class="sourceLineNo">338</span>            // of pipeline.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>            if (handler instanceof IdleStateHandler) {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>              break;<a name="line.340"></a>
-<span class="sourceLineNo">341</span>            }<a name="line.341"></a>
-<span class="sourceLineNo">342</span>          }<a name="line.342"></a>
-<span class="sourceLineNo">343</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.343"></a>
-<span class="sourceLineNo">344</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>          ctx.channel().config().setAutoRead(false);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>          promise.trySuccess(ctx.channel());<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
-<span class="sourceLineNo">348</span><a name="line.348"></a>
-<span class="sourceLineNo">349</span>        @Override<a name="line.349"></a>
-<span class="sourceLineNo">350</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.351"></a>
-<span class="sourceLineNo">352</span>        }<a name="line.352"></a>
-<span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>        @Override<a name="line.354"></a>
-<span class="sourceLineNo">355</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.356"></a>
-<span class="sourceLineNo">357</span>            promise<a name="line.357"></a>
-<span class="sourceLineNo">358</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>          } else {<a name="line.359"></a>
-<span class="sourceLineNo">360</span>            super.userEventTriggered(ctx, evt);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>          }<a name="line.361"></a>
-<span class="sourceLineNo">362</span>        }<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>        @Override<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>          promise.tryFailure(cause);<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        }<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      });<a name="line.368"></a>
-<span class="sourceLineNo">369</span>  }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>  private static void requestWriteBlock(Channel channel, StorageType storageType,<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    OpWriteBlockProto proto =<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    int protoLen = proto.getSerializedSize();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    ByteBuf buffer =<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    channel.writeAndFlush(buffer);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  }<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      StorageType storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      throws IOException {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>      @Override<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        if (future.isSuccess()) {<a name="line.394"></a>
-<span class="sourceLineNo">395</span>          // setup response processing pipeline first, then send request.<a name="line.395"></a>
-<span class="sourceLineNo">396</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.396"></a>
-<span class="sourceLineNo">397</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        } else {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>          promise.tryFailure(future.cause());<a name="line.399"></a>
-<span class="sourceLineNo">400</span>        }<a name="line.400"></a>
-<span class="sourceLineNo">401</span>      }<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    });<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
-<span class="sourceLineNo">404</span><a name="line.404"></a>
-<span class="sourceLineNo">405</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.407"></a>
-<span class="sourceLineNo">408</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    StorageType[] storageTypes = locatedBlock.getStorageTypes();<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    boolean connectToDnViaHostname =<a name="line.411"></a>
-<span class="sourceLineNo">412</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PBHelperClient.convert(blockCopy))<a name="line.417"></a>
-<span class="sourceLineNo">418</span>        .setToken(PBHelperClient.convert(locatedBlock.getBlockToken())))<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      .setClientName(clientName).build();<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.423"></a>
-<span class="sourceLineNo">424</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.424"></a>
-<span class="sourceLineNo">425</span>        .setRequestedChecksum(checksumProto)<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      StorageType storageType = storageTypes[i];<a name="line.430"></a>
-<span class="sourceLineNo">431</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      futureList.add(promise);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>            @Override<a name="line.437"></a>
-<span class="sourceLineNo">438</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.438"></a>
-<span class="sourceLineNo">439</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.439"></a>
-<span class="sourceLineNo">440</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.440"></a>
-<span class="sourceLineNo">441</span>              // a null handler.<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            }<a name="line.442"></a>
-<span class="sourceLineNo">443</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>            @Override<a name="line.445"></a>
-<span class="sourceLineNo">446</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>              if (future.isSuccess()) {<a name="line.447"></a>
-<span class="sourceLineNo">448</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.448"></a>
-<span class="sourceLineNo">449</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.449"></a>
-<span class="sourceLineNo">450</span>              } else {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>                promise.tryFailure(future.cause());<a name="line.451"></a>
-<span class="sourceLineNo">452</span>              }<a name="line.452"></a>
-<span class="sourceLineNo">453</span>            }<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          });<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    }<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    return futureList;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>  }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  public static class NameNodeException extends IOException {<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>    public NameNodeException(Throwable cause) {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      super(cause);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    }<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
-<span class="sourceLineNo">470</span><a name="line.470"></a>
-<span class="sourceLineNo">471</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.471"></a>
-<span class="sourceLineNo">472</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.472"></a>
-<span class="sourceLineNo">473</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    Configuration conf = dfs.getConf();<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    DFSClient client = dfs.getClient();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    String clientName = client.getClientName();<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    ClientProtocol namenode = client.getNamenode();<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.479"></a>
-<span class="sourceLineNo">480</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    for (int retry = 0;; retry++) {<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      HdfsFileStatus stat;<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.486"></a>
-<span class="sourceLineNo">487</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      } catch (Exception e) {<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        if (e instanceof RemoteException) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>          throw (RemoteException) e;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        } else {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>          throw new NameNodeException(e);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        }<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      }<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      beginFileLease(client, stat.getFileId());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      boolean succ = false;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      LocatedBlock locatedBlock = null;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      try {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>        DataChecksum summer = createChecksum(client);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        locatedBlock = namenode.addBlock(src, client.getClientName(), null, excludesNodes,<a name="line.502"></a>
-<span class="sourceLineNo">503</span>          stat.getFileId(), null, null);<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          try {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          } catch (Exception e) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>            // exclude the broken DN next time<a name="line.511"></a>
-<span class="sourceLineNo">512</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.512"></a>
-<span class="sourceLineNo">513</span>            throw e;<a name="line.513"></a>
-<span class="sourceLineNo">514</span>          }<a name="line.514"></a>
-<span class="sourceLineNo">515</span>        }<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.516"></a>
-<span class="sourceLineNo">517</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.518"></a>
-<span class="sourceLineNo">519</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.519"></a>
-<span class="sourceLineNo">520</span>        succ = true;<a name="line.520"></a>
-<span class="sourceLineNo">521</span>        return output;<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      } catch (RemoteException e) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.523"></a>
-<span class="sourceLineNo">524</span>        if (shouldRetryCreate(e)) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          if (retry &gt;= createMaxRetries) {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>            throw e.unwrapRemoteException();<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          }<a name="line.527"></a>
-<span class="sourceLineNo">528</span>        } else {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          throw e.unwrapRemoteException();<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      } catch (IOException e) {<a name="line.531"></a>
-<span class="sourceLineNo">532</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>        if (retry &gt;= createMaxRetries) {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          throw e;<a name="line.534"></a>
-<span class="sourceLineNo">535</span>        }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>        // overwrite the old broken file.<a name="line.536"></a>
-<span class="sourceLineNo">537</span>        overwrite = true;<a name="line.537"></a>
-<span class="sourceLineNo">538</span>        try {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.539"></a>
-<span class="sourceLineNo">540</span>        } catch (InterruptedException ie) {<a name="line.540"></a>
-<span class="sourceLineNo">541</span>          throw new InterruptedIOException();<a name="line.541"></a>
-<span class="sourceLineNo">542</span>        }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      } finally {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>        if (!succ) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>          if (futureList != null) {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>                @Override<a name="line.549"></a>
-<span class="sourceLineNo">550</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.550"></a>
-<span class="sourceLineNo">551</span>                  if (future.isSuccess()) {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>                    future.getNow().close();<a name="line.552"></a>
-<span class="sourceLineNo">553</span>                  }<a name="line.553"></a>
-<span class="sourceLineNo">554</span>                }<a name="line.554"></a>
-<span class="sourceLineNo">555</span>              });<a name="line.555"></a>
-<span class="sourceLineNo">556</span>            }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          }<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          endFileLease(client, stat.getFileId());<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * inside an {@link EventLoop}.<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.569"></a>
-<span class="sourceLineNo">570</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.571"></a>
-<span class="sourceLineNo">572</span><a name="line.572"></a>
-<span class="sourceLineNo">573</span>      @Override<a name="line.573"></a>
-<span class="sourceLineNo">574</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.574"></a>
-<span class="sourceLineNo">575</span>          throws IOException, UnresolvedLinkException {<a name="line.575"></a>
-<span class="sourceLineNo">576</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.576"></a>
-<span class="sourceLineNo">577</span>          blockSize, eventLoopGroup, channelClass);<a name="line.577"></a>
+<span class="sourceLineNo">290</span>  static {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    try {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      LEASE_MANAGER = createLeaseManager();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.293"></a>
+<span class="sourceLineNo">294</span>      FILE_CREATOR = createFileCreator();<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    } catch (Exception e) {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.297"></a>
+<span class="sourceLineNo">298</span>          "HBASE-16110 for more information.";<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      LOG.error(msg, e);<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      throw new Error(msg, e);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    }<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
+<span class="sourceLineNo">303</span><a name="line.303"></a>
+<span class="sourceLineNo">304</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  }<a name="line.306"></a>
+<span class="sourceLineNo">307</span><a name="line.307"></a>
+<span class="sourceLineNo">308</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    return client.getConf().createChecksum(null);<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  }<a name="line.314"></a>
+<span class="sourceLineNo">315</span><a name="line.315"></a>
+<span class="sourceLineNo">316</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    List&lt;Integer&gt; flagList = ack.getFlagList();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    Integer headerFlag;<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    if (flagList.isEmpty()) {<a name="line.319"></a>
+<span class="sourceLineNo">320</span>      Status reply = ack.getReply(0);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      headerFlag = PipelineAck.combineHeader(ECN.DISABLED, reply);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    } else {<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      headerFlag = flagList.get(0);<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    }<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    return PipelineAck.getStatusFromHeader(headerFlag);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.328"></a>
+<span class="sourceLineNo">329</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.330"></a>
+<span class="sourceLineNo">331</span>      new ProtobufVarint32FrameDecoder(),<a name="line.331"></a>
+<span class="sourceLineNo">332</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.333"></a>
+<span class="sourceLineNo">334</span><a name="line.334"></a>
+<span class="sourceLineNo">335</span>        @Override<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.336"></a>
+<span class="sourceLineNo">337</span>            throws Exception {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          Status pipelineStatus = resp.getStatus();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.339"></a>
+<span class="sourceLineNo">340</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.340"></a>
+<span class="sourceLineNo">341</span>          }<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.343"></a>
+<span class="sourceLineNo">344</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.344"></a>
+<span class="sourceLineNo">345</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.345"></a>
+<span class="sourceLineNo">346</span>                  resp.getMessage() + ", " + logInfo);<a name="line.346"></a>
+<span class="sourceLineNo">347</span>            } else {<a name="line.347"></a>
+<span class="sourceLineNo">348</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.348"></a>
+<span class="sourceLineNo">349</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.349"></a>
+<span class="sourceLineNo">350</span>            }<a name="line.350"></a>
+<span class="sourceLineNo">351</span>          }<a name="line.351"></a>
+<span class="sourceLineNo">352</span>          // success<a name="line.352"></a>
+<span class="sourceLineNo">353</span>          ChannelPipeline p = ctx.pipeline();<a name="line.353"></a>
+<span class="sourceLineNo">354</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.354"></a>
+<span class="sourceLineNo">355</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.355"></a>
+<span class="sourceLineNo">356</span>            // of pipeline.<a name="line.356"></a>
+<span class="sourceLineNo">357</span>            if (handler instanceof IdleStateHandler) {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>              break;<a name="line.358"></a>
+<span class="sourceLineNo">359</span>            }<a name="line.359"></a>
+<span class="sourceLineNo">360</span>          }<a name="line.360"></a>
+<span class="sourceLineNo">361</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.361"></a>
+<span class="sourceLineNo">362</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.362"></a>
+<span class="sourceLineNo">363</span>          ctx.channel().config().setAutoRead(false);<a name="line.363"></a>
+<span class="sourceLineNo">364</span>          promise.trySuccess(ctx.channel());<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
+<span class="sourceLineNo">366</span><a name="line.366"></a>
+<span class="sourceLineNo">367</span>        @Override<a name="line.367"></a>
+<span class="sourceLineNo">368</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.369"></a>
+<span class="sourceLineNo">370</span>        }<a name="line.370"></a>
+<span class="sourceLineNo">371</span><a name="line.371"></a>
+<span class="sourceLineNo">372</span>        @Override<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.373"></a>
+<span class="sourceLineNo">374</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>            promise<a name="line.375"></a>
+<span class="sourceLineNo">376</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.376"></a>
+<span class="sourceLineNo">377</span>          } else {<a name="line.377"></a>
+<span class="sourceLineNo">378</span>            super.userEventTriggered(ctx, evt);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>          }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>        }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>        @Override<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          promise.tryFailure(cause);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>        }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      });<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  }<a name="line.387"></a>
+<span class="sourceLineNo">388</span><a name="line.388"></a>
+<span class="sourceLineNo">389</span>  private static void requestWriteBlock(Channel channel, StorageType storageType,<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    OpWriteBlockProto proto =<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();<a name="line.392"></a>
+<span class="sourceLineNo">393</span>    int protoLen = proto.getSerializedSize();<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    ByteBuf buffer =<a name="line.394"></a>
+<span class="sourceLineNo">395</span>      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.397"></a>
+<span class="sourceLineNo">398</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    channel.writeAndFlush(buffer);<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  }<a name="line.400"></a>
+<span class="sourceLineNo">401</span><a name="line.401"></a>
+<span class="sourceLineNo">402</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.402"></a>
+<span class="sourceLineNo">403</span>      StorageType storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.403"></a>
+<span class="sourceLineNo">404</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.404"></a>
+<span class="sourceLineNo">405</span>      throws IOException {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.406"></a>
+<span class="sourceLineNo">407</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.407"></a>
+<span class="sourceLineNo">408</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.408"></a>
+<span class="sourceLineNo">409</span><a name="line.409"></a>
+<span class="sourceLineNo">410</span>      @Override<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>        if (future.isSuccess()) {<a name="line.412"></a>
+<span class="sourceLineNo">413</span>          // setup response processing pipeline first, then send request.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.415"></a>
+<span class="sourceLineNo">416</span>        } else {<a name="line.416"></a>
+<span class="sourceLineNo">417</span>          promise.tryFailure(future.cause());<a name="line.417"></a>
+<span class="sourceLineNo">418</span>        }<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      }<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    });<a name="line.420"></a>
+<span class="sourceLineNo">421</span>  }<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.425"></a>
+<span class="sourceLineNo">426</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    StorageType[] storageTypes = locatedBlock.getStorageTypes();<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.428"></a>
+<span class="sourceLineNo">429</span>    boolean connectToDnViaHostname =<a name="line.429"></a>
+<span class="sourceLineNo">430</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.432"></a>
+<span class="sourceLineNo">433</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.434"></a>
+<span class="sourceLineNo">435</span>      .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PBHelperClient.convert(blockCopy))<a name="line.435"></a>
+<span class="sourceLineNo">436</span>        .setToken(PBHelperClient.convert(locatedBlock.getBlockToken())))<a name="line.436"></a>
+<span class="sourceLineNo">437</span>      .setClientName(clientName).build();<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.438"></a>
+<span class="sourceLineNo">439</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        .setRequestedChecksum(checksumProto)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.445"></a>
+<span class="sourceLineNo">446</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.447"></a>
+<span class="sourceLineNo">448</span>      StorageType storageType = storageTypes[i];<a name="line.448"></a>
+<span class="sourceLineNo">449</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>      futureList.add(promise);<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.453"></a>
+<span class="sourceLineNo">454</span><a name="line.454"></a>
+<span class="sourceLineNo">455</span>            @Override<a name="line.455"></a>
+<span class="sourceLineNo">456</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.456"></a>
+<span class="sourceLineNo">457</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.457"></a>
+<span class="sourceLineNo">458</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.458"></a>
+<span class="sourceLineNo">459</span>              // a null handler.<a name="line.459"></a>
+<span class="sourceLineNo">460</span>            }<a name="line.460"></a>
+<span class="sourceLineNo">461</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.461"></a>
+<span class="sourceLineNo">462</span><a name="line.462"></a>
+<span class="sourceLineNo">463</span>            @Override<a name="line.463"></a>
+<span class="sourceLineNo">464</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>              if (future.isSuccess()) {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.466"></a>
+<span class="sourceLineNo">467</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.467"></a>
+<span class="sourceLineNo">468</span>              } else {<a name="line.468"></a>
+<span class="sourceLineNo">469</span>                promise.tryFailure(future.cause());<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              }<a name="line.470"></a>
+<span class="sourceLineNo">471</span>            }<a name="line.471"></a>
+<span class="sourceLineNo">472</span>          });<a name="line.472"></a>
+<span class="sourceLineNo">473</span>    }<a name="line.473"></a>
+<span class="sourceLineNo">474</span>    return futureList;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>  }<a name="line.475"></a>
+<span class="sourceLineNo">476</span><a name="line.476"></a>
+<span class="sourceLineNo">477</span>  /**<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   */<a name="line.479"></a>
+<span class="sourceLineNo">480</span>  public static class NameNodeException extends IOException {<a name="line.480"></a>
+<span class="sourceLineNo">481</span><a name="line.481"></a>
+<span class="sourceLineNo">482</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>    public NameNodeException(Throwable cause) {<a name="line.484"></a>
+<span class="sourceLineNo">485</span>      super(cause);<a name="line.485"></a>
+<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
+<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
+<span class="sourceLineNo">488</span><a name="line.488"></a>
+<span class="sourceLineNo">489</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.490"></a>
+<span class="sourceLineNo">491</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    Configuration conf = dfs.getConf();<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    DFSClient client = dfs.getClient();<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    String clientName = client.getClientName();<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    ClientProtocol namenode = client.getNamenode();<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    for (int retry = 0;; retry++) {<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      HdfsFileStatus stat;<a name="line.501"></a>
+<span class="sourceLineNo">502</span>      try {<a name="line.502"></a>
+<span class="sourceLineNo">503</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.503"></a>
+<span class="sourceLineNo">504</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.504"></a>
+<span class="sourceLineNo">505</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.505"></a>
+<span class="sourceLineNo">506</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.506"></a>
+<span class="sourceLineNo">507</span>      } catch (Exception e) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>        if (e instanceof RemoteException) {<a name="line.508"></a>
+<span class="sourceLineNo">509</span>          throw (RemoteException) e;<a name="line.509"></a>
+<span class="sourceLineNo">510</span>        } else {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>          throw new NameNodeException(e);<a name="line.511"></a>
+<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      }<a name="line.513"></a>
+<span class="sourceLineNo">514</span>      beginFileLease(client, stat.getFileId());<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      boolean succ = false;<a name="line.515"></a>
+<span class="sourceLineNo">516</span>      LocatedBlock locatedBlock = null;<a name="line.516"></a>
+<span class="sourceLineNo">517</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.517"></a>
+<span class="sourceLineNo">518</span>      try {<a name="line.518"></a>
+<span class="sourceLineNo">519</span>        DataChecksum summer = createChecksum(client);<a name="line.519"></a>
+<span class="sourceLineNo">520</span>        locatedBlock = namenode.addBlock(src, client.getClientName(), null, excludesNodes,<a name="line.520"></a>
+<span class="sourceLineNo">521</span>          stat.getFileId(), null, null);<a name="line.521"></a>
+<span class="sourceLineNo">522</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.523"></a>
+<span class="sourceLineNo">524</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.524"></a>
+<span class="sourceLineNo">525</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          try {<a name="line.526"></a>
+<span class="sourceLineNo">527</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.527"></a>
+<span class="sourceLineNo">528</span>          } catch (Exception e) {<a name="line.528"></a>
+<span class="sourceLineNo">529</span>            // exclude the broken DN next time<a name="line.529"></a>
+<span class="sourceLineNo">530</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.530"></a>
+<span class="sourceLineNo">531</span>            throw e;<a name="line.531"></a>
+<span class="sourceLineNo">532</span>          }<a name="line.532"></a>
+<span class="sourceLineNo">533</span>        }<a name="line.533"></a>
+<span class="sourceLineNo">534</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.534"></a>
+<span class="sourceLineNo">535</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.535"></a>
+<span class="sourceLineNo">536</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.536"></a>
+<span class="sourceLineNo">537</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        succ = true;<a name="line.538"></a>
+<span class="sourceLineNo">539</span>        return output;<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      } catch (RemoteException e) {<a name="line.540"></a>
+<span class="sourceLineNo">541</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>        if (shouldRetryCreate(e)) {<a name="line.542"></a>
+<span class="sourceLineNo">543</span>          if (retry &gt;= createMaxRetries) {<a name="line.543"></a>
+<span class="sourceLineNo">544</span>            throw e.unwrapRemoteException();<a name="line.544"></a>
+<span class="sourceLineNo">545</span>          }<a name="line.545"></a>
+<span class="sourceLineNo">546</span>        } else {<a name="line.546"></a>
+<span class="sourceLineNo">547</span>          throw e.unwrapRemoteException();<a name="line.547"></a>
+<span class="sourceLineNo">548</span>        }<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      } catch (IOException e) {<a name="line.549"></a>
+<span class="sourceLineNo">550</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.550"></a>
+<span class="sourceLineNo">551</span>        if (retry &gt;= createMaxRetries) {<a name="line.551"></a>
+<span class="sourceLineNo">552</span>          throw e;<a name="line.552"></a>
+<span class="sourceLineNo">553</span>        }<a name="line.553"></a>
+<span class="sourceLineNo">554</span>        // overwrite the old broken file.<a name="line.554"></a>
+<span class="sourceLineNo">555</span>        overwrite = true;<a name="line.555"></a>
+<span class="sourceLineNo">556</span>        try {<a name="line.556"></a>
+<span class="sourceLineNo">557</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.557"></a>
+<span class="sourceLineNo">558</span>        } catch (InterruptedException ie) {<a name="line.558"></a>
+<span class="sourceLineNo">559</span>          throw new InterruptedIOException();<a name="line.559"></a>
+<span class="sourceLineNo">560</span>        }<a name="line.560"></a>
+<span class="sourceLineNo">561</span>      } finally {<a name="line.561"></a>
+<span class="sourceLineNo">562</span>        if (!succ) {<a name="line.562"></a>
+<span class="sourceLineNo">563</span>          if (futureList != null) {<a name="line.563"></a>
+<span class="sourceLineNo">564</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.565"></a>
+<span class="sourceLineNo">566</span><a name="line.566"></a>
+<span class="sourceLineNo">567</span>                @Override<a name="line.567"></a>
+<span class="sourceLineNo">568</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.568"></a>
+<span class="sourceLineNo">569</span>                  if (future.isSuccess()) {<a name="line.569"></a>
+<span class="sourceLineNo">570</span>                    future.getNow().close();<a name="line.570"></a>
+<span class="sourceLineNo">571</span>                  }<a name="line.571"></a>
+<span class="sourceLineNo">572</span>                }<a name="line.572"></a>
+<span class="sourceLineNo">573</span>              });<a name="line.573"></a>
+<span class="sourceLineNo">574</span>            }<a name="line.574"></a>
+<span class="sourceLineNo">575</span>          }<a name="line.575"></a>
+<span class="sourceLineNo">576</span>          endFileLease(client, stat.getFileId());<a name="line.576"></a>
+<span class="sourceLineNo">577</span>        }<a name="line.577"></a>
 <span class="sourceLineNo">578</span>      }<a name="line.578"></a>
-<span class="sourceLineNo">579</span><a name="line.579"></a>
-<span class="sourceLineNo">580</span>      @Override<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>        throw new UnsupportedOperationException();<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      }<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }.resolve(dfs, f);<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.589"></a>
-<span class="sourceLineNo">590</span>    // DFSOutputStream.newStreamForCreate.<a name="line.590"></a>
-<span class="sourceLineNo">591</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.591"></a>
-<span class="sourceLineNo">592</span>  }<a name="line.592"></a>
-<span class="sourceLineNo">593</span><a name="line.593"></a>
-<span class="sourceLineNo">594</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      ExtendedBlock block, long fileId) {<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    for (int retry = 0;; retry++) {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      try {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>          endFileLease(client, fileId);<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          return;<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } else {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      } catch (RemoteException e) {<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        IOException ioe = e.unwrapRemoteException();<a name="line.605"></a>
-<span class="sourceLineNo">606</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>          return;<a name="line.608"></a>
-<span class="sourceLineNo">609</span>        } else {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        }<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      } catch (Exception e) {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      }<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      sleepIgnoreInterrupt(retry);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    }<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  }<a name="line.617"></a>
-<span class="sourceLineNo">618</span><a name="line.618"></a>
-<span class="sourceLineNo">619</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    try {<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    } catch (InterruptedException e) {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    }<a name="line.623"></a>
-<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
-<span class="sourceLineNo">625</span>}<a name="line.625"></a>
+<span class="sourceLineNo">579</span>    }<a name="line.579"></a>
+<span class="sourceLineNo">580</span>  }<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>  /**<a name="line.582"></a>
+<span class="sourceLineNo">583</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.583"></a>
+<span class="sourceLineNo">584</span>   * inside an {@link EventLoop}.<a name="line.584"></a>
+<span class="sourceLineNo">585</span>   */<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.587"></a>
+<span class="sourceLineNo">588</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.588"></a>
+<span class="sourceLineNo">589</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.589"></a>
+<span class="sourceLineNo">590</span><a name="line.590"></a>
+<span class="sourceLineNo">591</span>      @Override<a name="line.591"></a>
+<span class="sourceLineNo">592</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.592"></a>
+<span class="sourceLineNo">593</span>          throws IOException, UnresolvedLinkException {<a name="line.593"></a>
+<span class="sourceLineNo">594</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.594"></a>
+<span class="sourceLineNo">595</span>          blockSize, eventLoopGroup, channelClass);<a name="line.595"></a>
+<span class="sourceLineNo">596</span>      }<a name="line.596"></a>
+<span class="sourceLineNo">597</span><a name="line.597"></a>
+<span class="sourceLineNo">598</span>      @Override<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.599"></a>
+<span class="sourceLineNo">600</span>        throw new UnsupportedOperationException();<a name="line.600"></a>
+<span class="sourceLineNo">601</span>      }<a name="line.601"></a>
+<span class="sourceLineNo">602</span>    }.resolve(dfs, f);<a name="line.602"></a>
+<span class="sourceLineNo">603</span>  }<a name="line.603"></a>
+<span class="sourceLineNo">604</span><a name="line.604"></a>
+<span class="sourceLineNo">605</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.607"></a>
+<span class="sourceLineNo">608</span>    // DFSOutputStream.newStreamForCreate.<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.609"></a>
+<span class="sourceLineNo">610</span>  }<a name="line.610"></a>
+<span class="sourceLineNo">611</span><a name="line.611"></a>
+<span class="sourceLineNo">612</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.612"></a>
+<span class="sourceLineNo">613</span>      ExtendedBlock block, long fileId) {<a name="line.613"></a>
+<span class="sourceLineNo">614</span>    for (int retry = 0;; retry++) {<a name="line.614"></a>
+<span class="sourceLineNo">615</span>      try {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.616"></a>
+<span class="sourceLineNo">617</span>          endFileLease(client, fileId);<a name="line.617"></a>
+<span class="sourceLineNo">618</span>          return;<a name="line.618"></a>
+<span class="sourceLineNo">619</span>        } else {<a name="line.619"></a>
+<span class="sourceLineNo">620</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.620"></a>
+<span class="sourceLineNo">621</span>        }<a name="line.621"></a>
+<span class="sourceLineNo">622</span>      } catch (RemoteException e) {<a name="line.622"></a>
+<span class="sourceLineNo">623</span>        IOException ioe = e.unwrapRemoteException();<a name="line.623"></a>
+<span class="sourceLineNo">624</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.624"></a>
+<span class="sourceLineNo">625</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.625"></a>
+<span class="sourceLineNo">626</span>          return;<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        } else {<a name="line.627"></a>
+<span class="sourceLineNo">628</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.628"></a>
+<span class="sourceLineNo">629</span>        }<a name="line.629"></a>
+<span class="sourceLineNo">630</span>      } catch (Exception e) {<a name="line.630"></a>
+<span class="sourceLineNo">631</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      }<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      sleepIgnoreInterrupt(retry);<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
+<span class="sourceLineNo">635</span>  }<a name="line.635"></a>
+<span class="sourceLineNo">636</span><a name="line.636"></a>
+<span class="sourceLineNo">637</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.637"></a>
+<span class="sourceLineNo">638</span>    try {<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.639"></a>
+<span class="sourceLineNo">640</span>    } catch (InterruptedException e) {<a name="line.640"></a>
+<span class="sourceLineNo">641</span>    }<a name="line.641"></a>
+<span class="sourceLineNo">642</span>  }<a name="line.642"></a>
+<span class="sourceLineNo">643</span>}<a name="line.643"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
index 6684af5..3556576 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.NameNodeException.html
@@ -229,408 +229,426 @@
 <span class="sourceLineNo">221</span>    };<a name="line.221"></a>
 <span class="sourceLineNo">222</span>  }<a name="line.222"></a>
 <span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.224"></a>
+<span class="sourceLineNo">224</span>  private static FileCreator createFileCreator3_3() throws NoSuchMethodException {<a name="line.224"></a>
 <span class="sourceLineNo">225</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.226"></a>
-<span class="sourceLineNo">227</span>      CryptoProtocolVersion[].class, String.class);<a name="line.227"></a>
+<span class="sourceLineNo">226</span>        String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        CryptoProtocolVersion[].class, String.class, String.class);<a name="line.227"></a>
 <span class="sourceLineNo">228</span><a name="line.228"></a>
 <span class="sourceLineNo">229</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.229"></a>
 <span class="sourceLineNo">230</span>        supportedVersions) -&gt; {<a name="line.230"></a>
 <span class="sourceLineNo">231</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.231"></a>
-<span class="sourceLineNo">232</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.232"></a>
+<span class="sourceLineNo">232</span>          createParent, replication, blockSize, supportedVersions, null, null);<a name="line.232"></a>
 <span class="sourceLineNo">233</span>    };<a name="line.233"></a>
 <span class="sourceLineNo">234</span>  }<a name="line.234"></a>
 <span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.236"></a>
+<span class="sourceLineNo">236</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.236"></a>
 <span class="sourceLineNo">237</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.237"></a>
 <span class="sourceLineNo">238</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      CryptoProtocolVersion[].class);<a name="line.239"></a>
+<span class="sourceLineNo">239</span>      CryptoProtocolVersion[].class, String.class);<a name="line.239"></a>
 <span class="sourceLineNo">240</span><a name="line.240"></a>
 <span class="sourceLineNo">241</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.241"></a>
 <span class="sourceLineNo">242</span>        supportedVersions) -&gt; {<a name="line.242"></a>
 <span class="sourceLineNo">243</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        createParent, replication, blockSize, supportedVersions);<a name="line.244"></a>
+<span class="sourceLineNo">244</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.244"></a>
 <span class="sourceLineNo">245</span>    };<a name="line.245"></a>
 <span class="sourceLineNo">246</span>  }<a name="line.246"></a>
 <span class="sourceLineNo">247</span><a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    try {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return createFileCreator3();<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    } catch (NoSuchMethodException e) {<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    }<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    return createFileCreator2();<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  }<a name="line.255"></a>
-<span class="sourceLineNo">256</span><a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // cancel the processing if DFSClient is already closed.<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.258"></a>
+<span class="sourceLineNo">248</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      CryptoProtocolVersion[].class);<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        supportedVersions) -&gt; {<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.255"></a>
+<span class="sourceLineNo">256</span>        createParent, replication, blockSize, supportedVersions);<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    };<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  }<a name="line.258"></a>
 <span class="sourceLineNo">259</span><a name="line.259"></a>
-<span class="sourceLineNo">260</span>    private final DFSClient client;<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    public CancelOnClose(DFSClient client) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      this.client = client;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
-<span class="sourceLineNo">265</span><a name="line.265"></a>
-<span class="sourceLineNo">266</span>    @Override<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    public boolean progress() {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  static {<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    try {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      LEASE_MANAGER = createLeaseManager();<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      FILE_CREATOR = createFileCreator();<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    } catch (Exception e) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.278"></a>
-<span class="sourceLineNo">279</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          "HBASE-16110 for more information.";<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      LOG.error(msg, e);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      throw new Error(msg, e);<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
-<span class="sourceLineNo">285</span><a name="line.285"></a>
-<span class="sourceLineNo">286</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.287"></a>
+<span class="sourceLineNo">260</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    try {<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      return createFileCreator3_3();<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    } catch (NoSuchMethodException e) {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 3.2 or below");<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    }<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>    try {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      return createFileCreator3();<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    } catch (NoSuchMethodException e) {<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    }<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    return createFileCreator2();<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  }<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>  // cancel the processing if DFSClient is already closed.<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.276"></a>
+<span class="sourceLineNo">277</span><a name="line.277"></a>
+<span class="sourceLineNo">278</span>    private final DFSClient client;<a name="line.278"></a>
+<span class="sourceLineNo">279</span><a name="line.279"></a>
+<span class="sourceLineNo">280</span>    public CancelOnClose(DFSClient client) {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      this.client = client;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    public boolean progress() {<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
 <span class="sourceLineNo">288</span>  }<a name="line.288"></a>
 <span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  }<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    return client.getConf().createChecksum(null);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  }<a name="line.296"></a>
-<span class="sourceLineNo">297</span><a name="line.297"></a>
-<span class="sourceLineNo">298</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    List&lt;Integer&gt; flagList = ack.getFlagList();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    Integer headerFlag;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    if (flagList.isEmpty()) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      Status reply = ack.getReply(0);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      headerFlag = PipelineAck.combineHeader(ECN.DISABLED, reply);<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    } else {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      headerFlag = flagList.get(0);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    return PipelineAck.getStatusFromHeader(headerFlag);<a name="line.307"></a>
-<span class="sourceLineNo">308</span>  }<a name="line.308"></a>
-<span class="sourceLineNo">309</span><a name="line.309"></a>
-<span class="sourceLineNo">310</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      new ProtobufVarint32FrameDecoder(),<a name="line.313"></a>
-<span class="sourceLineNo">314</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>        @Override<a name="line.317"></a>
-<span class="sourceLineNo">318</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.318"></a>
-<span class="sourceLineNo">319</span>            throws Exception {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>          Status pipelineStatus = resp.getStatus();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.322"></a>
-<span class="sourceLineNo">323</span>          }<a name="line.323"></a>
-<span class="sourceLineNo">324</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.324"></a>
-<span class="sourceLineNo">325</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.327"></a>
-<span class="sourceLineNo">328</span>                  resp.getMessage() + ", " + logInfo);<a name="line.328"></a>
-<span class="sourceLineNo">329</span>            } else {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.330"></a>
-<span class="sourceLineNo">331</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>            }<a name="line.332"></a>
-<span class="sourceLineNo">333</span>          }<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // success<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          ChannelPipeline p = ctx.pipeline();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.337"></a>
-<span class="sourceLineNo">338</span>            // of pipeline.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>            if (handler instanceof IdleStateHandler) {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>              break;<a name="line.340"></a>
-<span class="sourceLineNo">341</span>            }<a name="line.341"></a>
-<span class="sourceLineNo">342</span>          }<a name="line.342"></a>
-<span class="sourceLineNo">343</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.343"></a>
-<span class="sourceLineNo">344</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>          ctx.channel().config().setAutoRead(false);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>          promise.trySuccess(ctx.channel());<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
-<span class="sourceLineNo">348</span><a name="line.348"></a>
-<span class="sourceLineNo">349</span>        @Override<a name="line.349"></a>
-<span class="sourceLineNo">350</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.351"></a>
-<span class="sourceLineNo">352</span>        }<a name="line.352"></a>
-<span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>        @Override<a name="line.354"></a>
-<span class="sourceLineNo">355</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.356"></a>
-<span class="sourceLineNo">357</span>            promise<a name="line.357"></a>
-<span class="sourceLineNo">358</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>          } else {<a name="line.359"></a>
-<span class="sourceLineNo">360</span>            super.userEventTriggered(ctx, evt);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>          }<a name="line.361"></a>
-<span class="sourceLineNo">362</span>        }<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>        @Override<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>          promise.tryFailure(cause);<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        }<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      });<a name="line.368"></a>
-<span class="sourceLineNo">369</span>  }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>  private static void requestWriteBlock(Channel channel, StorageType storageType,<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    OpWriteBlockProto proto =<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    int protoLen = proto.getSerializedSize();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    ByteBuf buffer =<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    channel.writeAndFlush(buffer);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  }<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      StorageType storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      throws IOException {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>      @Override<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        if (future.isSuccess()) {<a name="line.394"></a>
-<span class="sourceLineNo">395</span>          // setup response processing pipeline first, then send request.<a name="line.395"></a>
-<span class="sourceLineNo">396</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.396"></a>
-<span class="sourceLineNo">397</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        } else {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>          promise.tryFailure(future.cause());<a name="line.399"></a>
-<span class="sourceLineNo">400</span>        }<a name="line.400"></a>
-<span class="sourceLineNo">401</span>      }<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    });<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
-<span class="sourceLineNo">404</span><a name="line.404"></a>
-<span class="sourceLineNo">405</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.407"></a>
-<span class="sourceLineNo">408</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    StorageType[] storageTypes = locatedBlock.getStorageTypes();<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    boolean connectToDnViaHostname =<a name="line.411"></a>
-<span class="sourceLineNo">412</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PBHelperClient.convert(blockCopy))<a name="line.417"></a>
-<span class="sourceLineNo">418</span>        .setToken(PBHelperClient.convert(locatedBlock.getBlockToken())))<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      .setClientName(clientName).build();<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.423"></a>
-<span class="sourceLineNo">424</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.424"></a>
-<span class="sourceLineNo">425</span>        .setRequestedChecksum(checksumProto)<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      StorageType storageType = storageTypes[i];<a name="line.430"></a>
-<span class="sourceLineNo">431</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      futureList.add(promise);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>            @Override<a name="line.437"></a>
-<span class="sourceLineNo">438</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.438"></a>
-<span class="sourceLineNo">439</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.439"></a>
-<span class="sourceLineNo">440</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.440"></a>
-<span class="sourceLineNo">441</span>              // a null handler.<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            }<a name="line.442"></a>
-<span class="sourceLineNo">443</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>            @Override<a name="line.445"></a>
-<span class="sourceLineNo">446</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>              if (future.isSuccess()) {<a name="line.447"></a>
-<span class="sourceLineNo">448</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.448"></a>
-<span class="sourceLineNo">449</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.449"></a>
-<span class="sourceLineNo">450</span>              } else {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>                promise.tryFailure(future.cause());<a name="line.451"></a>
-<span class="sourceLineNo">452</span>              }<a name="line.452"></a>
-<span class="sourceLineNo">453</span>            }<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          });<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    }<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    return futureList;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>  }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  public static class NameNodeException extends IOException {<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>    public NameNodeException(Throwable cause) {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      super(cause);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    }<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
-<span class="sourceLineNo">470</span><a name="line.470"></a>
-<span class="sourceLineNo">471</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.471"></a>
-<span class="sourceLineNo">472</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.472"></a>
-<span class="sourceLineNo">473</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    Configuration conf = dfs.getConf();<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    DFSClient client = dfs.getClient();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    String clientName = client.getClientName();<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    ClientProtocol namenode = client.getNamenode();<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.479"></a>
-<span class="sourceLineNo">480</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    for (int retry = 0;; retry++) {<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      HdfsFileStatus stat;<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.486"></a>
-<span class="sourceLineNo">487</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      } catch (Exception e) {<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        if (e instanceof RemoteException) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>          throw (RemoteException) e;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        } else {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>          throw new NameNodeException(e);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        }<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      }<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      beginFileLease(client, stat.getFileId());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      boolean succ = false;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      LocatedBlock locatedBlock = null;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      try {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>        DataChecksum summer = createChecksum(client);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        locatedBlock = namenode.addBlock(src, client.getClientName(), null, excludesNodes,<a name="line.502"></a>
-<span class="sourceLineNo">503</span>          stat.getFileId(), null, null);<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          try {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          } catch (Exception e) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>            // exclude the broken DN next time<a name="line.511"></a>
-<span class="sourceLineNo">512</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.512"></a>
-<span class="sourceLineNo">513</span>            throw e;<a name="line.513"></a>
-<span class="sourceLineNo">514</span>          }<a name="line.514"></a>
-<span class="sourceLineNo">515</span>        }<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.516"></a>
-<span class="sourceLineNo">517</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.518"></a>
-<span class="sourceLineNo">519</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.519"></a>
-<span class="sourceLineNo">520</span>        succ = true;<a name="line.520"></a>
-<span class="sourceLineNo">521</span>        return output;<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      } catch (RemoteException e) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.523"></a>
-<span class="sourceLineNo">524</span>        if (shouldRetryCreate(e)) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          if (retry &gt;= createMaxRetries) {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>            throw e.unwrapRemoteException();<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          }<a name="line.527"></a>
-<span class="sourceLineNo">528</span>        } else {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          throw e.unwrapRemoteException();<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      } catch (IOException e) {<a name="line.531"></a>
-<span class="sourceLineNo">532</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>        if (retry &gt;= createMaxRetries) {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          throw e;<a name="line.534"></a>
-<span class="sourceLineNo">535</span>        }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>        // overwrite the old broken file.<a name="line.536"></a>
-<span class="sourceLineNo">537</span>        overwrite = true;<a name="line.537"></a>
-<span class="sourceLineNo">538</span>        try {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.539"></a>
-<span class="sourceLineNo">540</span>        } catch (InterruptedException ie) {<a name="line.540"></a>
-<span class="sourceLineNo">541</span>          throw new InterruptedIOException();<a name="line.541"></a>
-<span class="sourceLineNo">542</span>        }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      } finally {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>        if (!succ) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>          if (futureList != null) {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>                @Override<a name="line.549"></a>
-<span class="sourceLineNo">550</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.550"></a>
-<span class="sourceLineNo">551</span>                  if (future.isSuccess()) {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>                    future.getNow().close();<a name="line.552"></a>
-<span class="sourceLineNo">553</span>                  }<a name="line.553"></a>
-<span class="sourceLineNo">554</span>                }<a name="line.554"></a>
-<span class="sourceLineNo">555</span>              });<a name="line.555"></a>
-<span class="sourceLineNo">556</span>            }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          }<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          endFileLease(client, stat.getFileId());<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * inside an {@link EventLoop}.<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.569"></a>
-<span class="sourceLineNo">570</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.571"></a>
-<span class="sourceLineNo">572</span><a name="line.572"></a>
-<span class="sourceLineNo">573</span>      @Override<a name="line.573"></a>
-<span class="sourceLineNo">574</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.574"></a>
-<span class="sourceLineNo">575</span>          throws IOException, UnresolvedLinkException {<a name="line.575"></a>
-<span class="sourceLineNo">576</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.576"></a>
-<span class="sourceLineNo">577</span>          blockSize, eventLoopGroup, channelClass);<a name="line.577"></a>
+<span class="sourceLineNo">290</span>  static {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    try {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      LEASE_MANAGER = createLeaseManager();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.293"></a>
+<span class="sourceLineNo">294</span>      FILE_CREATOR = createFileCreator();<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    } catch (Exception e) {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.297"></a>
+<span class="sourceLineNo">298</span>          "HBASE-16110 for more information.";<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      LOG.error(msg, e);<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      throw new Error(msg, e);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    }<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
+<span class="sourceLineNo">303</span><a name="line.303"></a>
+<span class="sourceLineNo">304</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  }<a name="line.306"></a>
+<span class="sourceLineNo">307</span><a name="line.307"></a>
+<span class="sourceLineNo">308</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    return client.getConf().createChecksum(null);<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  }<a name="line.314"></a>
+<span class="sourceLineNo">315</span><a name="line.315"></a>
+<span class="sourceLineNo">316</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    List&lt;Integer&gt; flagList = ack.getFlagList();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    Integer headerFlag;<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    if (flagList.isEmpty()) {<a name="line.319"></a>
+<span class="sourceLineNo">320</span>      Status reply = ack.getReply(0);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      headerFlag = PipelineAck.combineHeader(ECN.DISABLED, reply);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    } else {<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      headerFlag = flagList.get(0);<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    }<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    return PipelineAck.getStatusFromHeader(headerFlag);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.328"></a>
+<span class="sourceLineNo">329</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.330"></a>
+<span class="sourceLineNo">331</span>      new ProtobufVarint32FrameDecoder(),<a name="line.331"></a>
+<span class="sourceLineNo">332</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.333"></a>
+<span class="sourceLineNo">334</span><a name="line.334"></a>
+<span class="sourceLineNo">335</span>        @Override<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.336"></a>
+<span class="sourceLineNo">337</span>            throws Exception {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          Status pipelineStatus = resp.getStatus();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.339"></a>
+<span class="sourceLineNo">340</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.340"></a>
+<span class="sourceLineNo">341</span>          }<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.343"></a>
+<span class="sourceLineNo">344</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.344"></a>
+<span class="sourceLineNo">345</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.345"></a>
+<span class="sourceLineNo">346</span>                  resp.getMessage() + ", " + logInfo);<a name="line.346"></a>
+<span class="sourceLineNo">347</span>            } else {<a name="line.347"></a>
+<span class="sourceLineNo">348</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.348"></a>
+<span class="sourceLineNo">349</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.349"></a>
+<span class="sourceLineNo">350</span>            }<a name="line.350"></a>
+<span class="sourceLineNo">351</span>          }<a name="line.351"></a>
+<span class="sourceLineNo">352</span>          // success<a name="line.352"></a>
+<span class="sourceLineNo">353</span>          ChannelPipeline p = ctx.pipeline();<a name="line.353"></a>
+<span class="sourceLineNo">354</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.354"></a>
+<span class="sourceLineNo">355</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.355"></a>
+<span class="sourceLineNo">356</span>            // of pipeline.<a name="line.356"></a>
+<span class="sourceLineNo">357</span>            if (handler instanceof IdleStateHandler) {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>              break;<a name="line.358"></a>
+<span class="sourceLineNo">359</span>            }<a name="line.359"></a>
+<span class="sourceLineNo">360</span>          }<a name="line.360"></a>
+<span class="sourceLineNo">361</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.361"></a>
+<span class="sourceLineNo">362</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.362"></a>
+<span class="sourceLineNo">363</span>          ctx.channel().config().setAutoRead(false);<a name="line.363"></a>
+<span class="sourceLineNo">364</span>          promise.trySuccess(ctx.channel());<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
+<span class="sourceLineNo">366</span><a name="line.366"></a>
+<span class="sourceLineNo">367</span>        @Override<a name="line.367"></a>
+<span class="sourceLineNo">368</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.369"></a>
+<span class="sourceLineNo">370</span>        }<a name="line.370"></a>
+<span class="sourceLineNo">371</span><a name="line.371"></a>
+<span class="sourceLineNo">372</span>        @Override<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.373"></a>
+<span class="sourceLineNo">374</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>            promise<a name="line.375"></a>
+<span class="sourceLineNo">376</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.376"></a>
+<span class="sourceLineNo">377</span>          } else {<a name="line.377"></a>
+<span class="sourceLineNo">378</span>            super.userEventTriggered(ctx, evt);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>          }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>        }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>        @Override<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          promise.tryFailure(cause);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>        }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      });<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  }<a name="line.387"></a>
+<span class="sourceLineNo">388</span><a name="line.388"></a>
+<span class="sourceLineNo">389</span>  private static void requestWriteBlock(Channel channel, StorageType storageType,<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    OpWriteBlockProto proto =<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();<a name="line.392"></a>
+<span class="sourceLineNo">393</span>    int protoLen = proto.getSerializedSize();<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    ByteBuf buffer =<a name="line.394"></a>
+<span class="sourceLineNo">395</span>      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.397"></a>
+<span class="sourceLineNo">398</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    channel.writeAndFlush(buffer);<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  }<a name="line.400"></a>
+<span class="sourceLineNo">401</span><a name="line.401"></a>
+<span class="sourceLineNo">402</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.402"></a>
+<span class="sourceLineNo">403</span>      StorageType storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.403"></a>
+<span class="sourceLineNo">404</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.404"></a>
+<span class="sourceLineNo">405</span>      throws IOException {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.406"></a>
+<span class="sourceLineNo">407</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.407"></a>
+<span class="sourceLineNo">408</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.408"></a>
+<span class="sourceLineNo">409</span><a name="line.409"></a>
+<span class="sourceLineNo">410</span>      @Override<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>        if (future.isSuccess()) {<a name="line.412"></a>
+<span class="sourceLineNo">413</span>          // setup response processing pipeline first, then send request.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.415"></a>
+<span class="sourceLineNo">416</span>        } else {<a name="line.416"></a>
+<span class="sourceLineNo">417</span>          promise.tryFailure(future.cause());<a name="line.417"></a>
+<span class="sourceLineNo">418</span>        }<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      }<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    });<a name="line.420"></a>
+<span class="sourceLineNo">421</span>  }<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.425"></a>
+<span class="sourceLineNo">426</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    StorageType[] storageTypes = locatedBlock.getStorageTypes();<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.428"></a>
+<span class="sourceLineNo">429</span>    boolean connectToDnViaHostname =<a name="line.429"></a>
+<span class="sourceLineNo">430</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.432"></a>
+<span class="sourceLineNo">433</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.434"></a>
+<span class="sourceLineNo">435</span>      .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PBHelperClient.convert(blockCopy))<a name="line.435"></a>
+<span class="sourceLineNo">436</span>        .setToken(PBHelperClient.convert(locatedBlock.getBlockToken())))<a name="line.436"></a>
+<span class="sourceLineNo">437</span>      .setClientName(clientName).build();<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.438"></a>
+<span class="sourceLineNo">439</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        .setRequestedChecksum(checksumProto)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.445"></a>
+<span class="sourceLineNo">446</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.447"></a>
+<span class="sourceLineNo">448</span>      StorageType storageType = storageTypes[i];<a name="line.448"></a>
+<span class="sourceLineNo">449</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>      futureList.add(promise);<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.453"></a>
+<span class="sourceLineNo">454</span><a name="line.454"></a>
+<span class="sourceLineNo">455</span>            @Override<a name="line.455"></a>
+<span class="sourceLineNo">456</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.456"></a>
+<span class="sourceLineNo">457</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.457"></a>
+<span class="sourceLineNo">458</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.458"></a>
+<span class="sourceLineNo">459</span>              // a null handler.<a name="line.459"></a>
+<span class="sourceLineNo">460</span>            }<a name="line.460"></a>
+<span class="sourceLineNo">461</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.461"></a>
+<span class="sourceLineNo">462</span><a name="line.462"></a>
+<span class="sourceLineNo">463</span>            @Override<a name="line.463"></a>
+<span class="sourceLineNo">464</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>              if (future.isSuccess()) {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.466"></a>
+<span class="sourceLineNo">467</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.467"></a>
+<span class="sourceLineNo">468</span>              } else {<a name="line.468"></a>
+<span class="sourceLineNo">469</span>                promise.tryFailure(future.cause());<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              }<a name="line.470"></a>
+<span class="sourceLineNo">471</span>            }<a name="line.471"></a>
+<span class="sourceLineNo">472</span>          });<a name="line.472"></a>
+<span class="sourceLineNo">473</span>    }<a name="line.473"></a>
+<span class="sourceLineNo">474</span>    return futureList;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>  }<a name="line.475"></a>
+<span class="sourceLineNo">476</span><a name="line.476"></a>
+<span class="sourceLineNo">477</span>  /**<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   */<a name="line.479"></a>
+<span class="sourceLineNo">480</span>  public static class NameNodeException extends IOException {<a name="line.480"></a>
+<span class="sourceLineNo">481</span><a name="line.481"></a>
+<span class="sourceLineNo">482</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>    public NameNodeException(Throwable cause) {<a name="line.484"></a>
+<span class="sourceLineNo">485</span>      super(cause);<a name="line.485"></a>
+<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
+<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
+<span class="sourceLineNo">488</span><a name="line.488"></a>
+<span class="sourceLineNo">489</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.490"></a>
+<span class="sourceLineNo">491</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    Configuration conf = dfs.getConf();<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    DFSClient client = dfs.getClient();<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    String clientName = client.getClientName();<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    ClientProtocol namenode = client.getNamenode();<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    for (int retry = 0;; retry++) {<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      HdfsFileStatus stat;<a name="line.501"></a>
+<span class="sourceLineNo">502</span>      try {<a name="line.502"></a>
+<span class="sourceLineNo">503</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.503"></a>
+<span class="sourceLineNo">504</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.504"></a>
+<span class="sourceLineNo">505</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.505"></a>
+<span class="sourceLineNo">506</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.506"></a>
+<span class="sourceLineNo">507</span>      } catch (Exception e) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>        if (e instanceof RemoteException) {<a name="line.508"></a>
+<span class="sourceLineNo">509</span>          throw (RemoteException) e;<a name="line.509"></a>
+<span class="sourceLineNo">510</span>        } else {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>          throw new NameNodeException(e);<a name="line.511"></a>
+<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      }<a name="line.513"></a>
+<span class="sourceLineNo">514</span>      beginFileLease(client, stat.getFileId());<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      boolean succ = false;<a name="line.515"></a>
+<span class="sourceLineNo">516</span>      LocatedBlock locatedBlock = null;<a name="line.516"></a>
+<span class="sourceLineNo">517</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.517"></a>
+<span class="sourceLineNo">518</span>      try {<a name="line.518"></a>
+<span class="sourceLineNo">519</span>        DataChecksum summer = createChecksum(client);<a name="line.519"></a>
+<span class="sourceLineNo">520</span>        locatedBlock = namenode.addBlock(src, client.getClientName(), null, excludesNodes,<a name="line.520"></a>
+<span class="sourceLineNo">521</span>          stat.getFileId(), null, null);<a name="line.521"></a>
+<span class="sourceLineNo">522</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.523"></a>
+<span class="sourceLineNo">524</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.524"></a>
+<span class="sourceLineNo">525</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          try {<a name="line.526"></a>
+<span class="sourceLineNo">527</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.527"></a>
+<span class="sourceLineNo">528</span>          } catch (Exception e) {<a name="line.528"></a>
+<span class="sourceLineNo">529</span>            // exclude the broken DN next time<a name="line.529"></a>
+<span class="sourceLineNo">530</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.530"></a>
+<span class="sourceLineNo">531</span>            throw e;<a name="line.531"></a>
+<span class="sourceLineNo">532</span>          }<a name="line.532"></a>
+<span class="sourceLineNo">533</span>        }<a name="line.533"></a>
+<span class="sourceLineNo">534</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.534"></a>
+<span class="sourceLineNo">535</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.535"></a>
+<span class="sourceLineNo">536</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.536"></a>
+<span class="sourceLineNo">537</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        succ = true;<a name="line.538"></a>
+<span class="sourceLineNo">539</span>        return output;<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      } catch (RemoteException e) {<a name="line.540"></a>
+<span class="sourceLineNo">541</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>        if (shouldRetryCreate(e)) {<a name="line.542"></a>
+<span class="sourceLineNo">543</span>          if (retry &gt;= createMaxRetries) {<a name="line.543"></a>
+<span class="sourceLineNo">544</span>            throw e.unwrapRemoteException();<a name="line.544"></a>
+<span class="sourceLineNo">545</span>          }<a name="line.545"></a>
+<span class="sourceLineNo">546</span>        } else {<a name="line.546"></a>
+<span class="sourceLineNo">547</span>          throw e.unwrapRemoteException();<a name="line.547"></a>
+<span class="sourceLineNo">548</span>        }<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      } catch (IOException e) {<a name="line.549"></a>
+<span class="sourceLineNo">550</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.550"></a>
+<span class="sourceLineNo">551</span>        if (retry &gt;= createMaxRetries) {<a name="line.551"></a>
+<span class="sourceLineNo">552</span>          throw e;<a name="line.552"></a>
+<span class="sourceLineNo">553</span>        }<a name="line.553"></a>
+<span class="sourceLineNo">554</span>        // overwrite the old broken file.<a name="line.554"></a>
+<span class="sourceLineNo">555</span>        overwrite = true;<a name="line.555"></a>
+<span class="sourceLineNo">556</span>        try {<a name="line.556"></a>
+<span class="sourceLineNo">557</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.557"></a>
+<span class="sourceLineNo">558</span>        } catch (InterruptedException ie) {<a name="line.558"></a>
+<span class="sourceLineNo">559</span>          throw new InterruptedIOException();<a name="line.559"></a>
+<span class="sourceLineNo">560</span>        }<a name="line.560"></a>
+<span class="sourceLineNo">561</span>      } finally {<a name="line.561"></a>
+<span class="sourceLineNo">562</span>        if (!succ) {<a name="line.562"></a>
+<span class="sourceLineNo">563</span>          if (futureList != null) {<a name="line.563"></a>
+<span class="sourceLineNo">564</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.565"></a>
+<span class="sourceLineNo">566</span><a name="line.566"></a>
+<span class="sourceLineNo">567</span>                @Override<a name="line.567"></a>
+<span class="sourceLineNo">568</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.568"></a>
+<span class="sourceLineNo">569</span>                  if (future.isSuccess()) {<a name="line.569"></a>
+<span class="sourceLineNo">570</span>                    future.getNow().close();<a name="line.570"></a>
+<span class="sourceLineNo">571</span>                  }<a name="line.571"></a>
+<span class="sourceLineNo">572</span>                }<a name="line.572"></a>
+<span class="sourceLineNo">573</span>              });<a name="line.573"></a>
+<span class="sourceLineNo">574</span>            }<a name="line.574"></a>
+<span class="sourceLineNo">575</span>          }<a name="line.575"></a>
+<span class="sourceLineNo">576</span>          endFileLease(client, stat.getFileId());<a name="line.576"></a>
+<span class="sourceLineNo">577</span>        }<a name="line.577"></a>
 <span class="sourceLineNo">578</span>      }<a name="line.578"></a>
-<span class="sourceLineNo">579</span><a name="line.579"></a>
-<span class="sourceLineNo">580</span>      @Override<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>        throw new UnsupportedOperationException();<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      }<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }.resolve(dfs, f);<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.589"></a>
-<span class="sourceLineNo">590</span>    // DFSOutputStream.newStreamForCreate.<a name="line.590"></a>
-<span class="sourceLineNo">591</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.591"></a>
-<span class="sourceLineNo">592</span>  }<a name="line.592"></a>
-<span class="sourceLineNo">593</span><a name="line.593"></a>
-<span class="sourceLineNo">594</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      ExtendedBlock block, long fileId) {<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    for (int retry = 0;; retry++) {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      try {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>          endFileLease(client, fileId);<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          return;<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } else {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      } catch (RemoteException e) {<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        IOException ioe = e.unwrapRemoteException();<a name="line.605"></a>
-<span class="sourceLineNo">606</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>          return;<a name="line.608"></a>
-<span class="sourceLineNo">609</span>        } else {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        }<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      } catch (Exception e) {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      }<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      sleepIgnoreInterrupt(retry);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    }<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  }<a name="line.617"></a>
-<span class="sourceLineNo">618</span><a name="line.618"></a>
-<span class="sourceLineNo">619</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    try {<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    } catch (InterruptedException e) {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    }<a name="line.623"></a>
-<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
-<span class="sourceLineNo">625</span>}<a name="line.625"></a>
+<span class="sourceLineNo">579</span>    }<a name="line.579"></a>
+<span class="sourceLineNo">580</span>  }<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>  /**<a name="line.582"></a>
+<span class="sourceLineNo">583</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.583"></a>
+<span class="sourceLineNo">584</span>   * inside an {@link EventLoop}.<a name="line.584"></a>
+<span class="sourceLineNo">585</span>   */<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.587"></a>
+<span class="sourceLineNo">588</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.588"></a>
+<span class="sourceLineNo">589</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.589"></a>
+<span class="sourceLineNo">590</span><a name="line.590"></a>
+<span class="sourceLineNo">591</span>      @Override<a name="line.591"></a>
+<span class="sourceLineNo">592</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.592"></a>
+<span class="sourceLineNo">593</span>          throws IOException, UnresolvedLinkException {<a name="line.593"></a>
+<span class="sourceLineNo">594</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.594"></a>
+<span class="sourceLineNo">595</span>          blockSize, eventLoopGroup, channelClass);<a name="line.595"></a>
+<span class="sourceLineNo">596</span>      }<a name="line.596"></a>
+<span class="sourceLineNo">597</span><a name="line.597"></a>
+<span class="sourceLineNo">598</span>      @Override<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.599"></a>
+<span class="sourceLineNo">600</span>        throw new UnsupportedOperationException();<a name="line.600"></a>
+<span class="sourceLineNo">601</span>      }<a name="line.601"></a>
+<span class="sourceLineNo">602</span>    }.resolve(dfs, f);<a name="line.602"></a>
+<span class="sourceLineNo">603</span>  }<a name="line.603"></a>
+<span class="sourceLineNo">604</span><a name="line.604"></a>
+<span class="sourceLineNo">605</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.607"></a>
+<span class="sourceLineNo">608</span>    // DFSOutputStream.newStreamForCreate.<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.609"></a>
+<span class="sourceLineNo">610</span>  }<a name="line.610"></a>
+<span class="sourceLineNo">611</span><a name="line.611"></a>
+<span class="sourceLineNo">612</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.612"></a>
+<span class="sourceLineNo">613</span>      ExtendedBlock block, long fileId) {<a name="line.613"></a>
+<span class="sourceLineNo">614</span>    for (int retry = 0;; retry++) {<a name="line.614"></a>
+<span class="sourceLineNo">615</span>      try {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.616"></a>
+<span class="sourceLineNo">617</span>          endFileLease(client, fileId);<a name="line.617"></a>
+<span class="sourceLineNo">618</span>          return;<a name="line.618"></a>
+<span class="sourceLineNo">619</span>        } else {<a name="line.619"></a>
+<span class="sourceLineNo">620</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.620"></a>
+<span class="sourceLineNo">621</span>        }<a name="line.621"></a>
+<span class="sourceLineNo">622</span>      } catch (RemoteException e) {<a name="line.622"></a>
+<span class="sourceLineNo">623</span>        IOException ioe = e.unwrapRemoteException();<a name="line.623"></a>
+<span class="sourceLineNo">624</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.624"></a>
+<span class="sourceLineNo">625</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.625"></a>
+<span class="sourceLineNo">626</span>          return;<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        } else {<a name="line.627"></a>
+<span class="sourceLineNo">628</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.628"></a>
+<span class="sourceLineNo">629</span>        }<a name="line.629"></a>
+<span class="sourceLineNo">630</span>      } catch (Exception e) {<a name="line.630"></a>
+<span class="sourceLineNo">631</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      }<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      sleepIgnoreInterrupt(retry);<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
+<span class="sourceLineNo">635</span>  }<a name="line.635"></a>
+<span class="sourceLineNo">636</span><a name="line.636"></a>
+<span class="sourceLineNo">637</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.637"></a>
+<span class="sourceLineNo">638</span>    try {<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.639"></a>
+<span class="sourceLineNo">640</span>    } catch (InterruptedException e) {<a name="line.640"></a>
+<span class="sourceLineNo">641</span>    }<a name="line.641"></a>
+<span class="sourceLineNo">642</span>  }<a name="line.642"></a>
+<span class="sourceLineNo">643</span>}<a name="line.643"></a>
 
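Both createOutput() and completeFile() retry in a loop with a paused backoff
(Thread.sleep(ConnectionUtils.getPauseTime(100, retry))), and createOutput()
also accumulates datanodes that failed to connect into excludesNodes so that
the next addBlock() avoids them. Below is a self-contained sketch of that
retry shape; RetrySketch, Attempt, maxRetries and the backoff formula are made
up for the example and only approximate ConnectionUtils.getPauseTime.

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.ThreadLocalRandom;

    // Illustrative sketch, not HBase API: bounded retries with backoff and an
    // exclusion list that survives across attempts, like excludesNodes above.
    public final class RetrySketch {

      interface Attempt<T> {
        // May add failed nodes to excludedNodes before rethrowing, mirroring the
        // "exclude the broken DN next time" handling in createOutput().
        T run(List<String> excludedNodes) throws IOException;
      }

      // Stand-in for ConnectionUtils.getPauseTime(base, retry): exponential
      // backoff capped at 2^10 * base, plus some jitter.
      static long pauseMs(long base, int retry) {
        long pause = base * (1L << Math.min(retry, 10));
        return pause + ThreadLocalRandom.current().nextLong(pause / 2 + 1);
      }

      static <T> T runWithRetries(Attempt<T> attempt, int maxRetries) throws IOException {
        List<String> excludedNodes = new ArrayList<>();
        for (int retry = 0;; retry++) {
          try {
            return attempt.run(excludedNodes);
          } catch (IOException e) {
            if (retry >= maxRetries) {
              throw e; // give up, as createOutput() does past createMaxRetries
            }
            try {
              Thread.sleep(pauseMs(100, retry));
            } catch (InterruptedException ie) {
              Thread.currentThread().interrupt();
              throw new IOException("interrupted while backing off", ie);
            }
          }
        }
      }
    }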
 
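The hunk below adjusts the reflection shims that adapt ClientProtocol.create
across Hadoop versions: a new createFileCreator3_3 probes the Hadoop 3.3
signature (two trailing String parameters), the former createFileCreator3 body
moves down to cover Hadoop 3.0, and createFileCreator2 still covers 2.x. The
updated fallback chain itself falls outside this excerpt; what follows is a
plausible reconstruction only, probing the newest signature first and relying
on the helpers shown in the hunk.

    // Plausible reconstruction of the updated probing order (the actual method
    // body lies outside this excerpt); each helper throws NoSuchMethodException
    // when its ClientProtocol.create signature is absent from the classpath.
    private static FileCreator createFileCreator() throws NoSuchMethodException {
      try {
        // Hadoop 3.3+: create(..., CryptoProtocolVersion[], String, String)
        return createFileCreator3_3();
      } catch (NoSuchMethodException e) {
        LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 3.2 or below");
      }
      try {
        // Hadoop 3.0-3.2: create(..., CryptoProtocolVersion[], String)
        return createFileCreator3();
      } catch (NoSuchMethodException e) {
        LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");
      }
      // Hadoop 2.x: create(..., CryptoProtocolVersion[])
      return createFileCreator2();
    }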
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html
index 6684af5..3556576 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.html
@@ -229,408 +229,426 @@
 <span class="sourceLineNo">221</span>    };<a name="line.221"></a>
 <span class="sourceLineNo">222</span>  }<a name="line.222"></a>
 <span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.224"></a>
+<span class="sourceLineNo">224</span>  private static FileCreator createFileCreator3_3() throws NoSuchMethodException {<a name="line.224"></a>
 <span class="sourceLineNo">225</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.226"></a>
-<span class="sourceLineNo">227</span>      CryptoProtocolVersion[].class, String.class);<a name="line.227"></a>
+<span class="sourceLineNo">226</span>        String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        CryptoProtocolVersion[].class, String.class, String.class);<a name="line.227"></a>
 <span class="sourceLineNo">228</span><a name="line.228"></a>
 <span class="sourceLineNo">229</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.229"></a>
 <span class="sourceLineNo">230</span>        supportedVersions) -&gt; {<a name="line.230"></a>
 <span class="sourceLineNo">231</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.231"></a>
-<span class="sourceLineNo">232</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.232"></a>
+<span class="sourceLineNo">232</span>          createParent, replication, blockSize, supportedVersions, null, null);<a name="line.232"></a>
 <span class="sourceLineNo">233</span>    };<a name="line.233"></a>
 <span class="sourceLineNo">234</span>  }<a name="line.234"></a>
 <span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.236"></a>
+<span class="sourceLineNo">236</span>  private static FileCreator createFileCreator3() throws NoSuchMethodException {<a name="line.236"></a>
 <span class="sourceLineNo">237</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.237"></a>
 <span class="sourceLineNo">238</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      CryptoProtocolVersion[].class);<a name="line.239"></a>
+<span class="sourceLineNo">239</span>      CryptoProtocolVersion[].class, String.class);<a name="line.239"></a>
 <span class="sourceLineNo">240</span><a name="line.240"></a>
 <span class="sourceLineNo">241</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.241"></a>
 <span class="sourceLineNo">242</span>        supportedVersions) -&gt; {<a name="line.242"></a>
 <span class="sourceLineNo">243</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        createParent, replication, blockSize, supportedVersions);<a name="line.244"></a>
+<span class="sourceLineNo">244</span>        createParent, replication, blockSize, supportedVersions, null);<a name="line.244"></a>
 <span class="sourceLineNo">245</span>    };<a name="line.245"></a>
 <span class="sourceLineNo">246</span>  }<a name="line.246"></a>
 <span class="sourceLineNo">247</span><a name="line.247"></a>
-<span class="sourceLineNo">248</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    try {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      return createFileCreator3();<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    } catch (NoSuchMethodException e) {<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    }<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    return createFileCreator2();<a name="line.254"></a>
-<span class="sourceLineNo">255</span>  }<a name="line.255"></a>
-<span class="sourceLineNo">256</span><a name="line.256"></a>
-<span class="sourceLineNo">257</span>  // cancel the processing if DFSClient is already closed.<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.258"></a>
+<span class="sourceLineNo">248</span>  private static FileCreator createFileCreator2() throws NoSuchMethodException {<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    Method createMethod = ClientProtocol.class.getMethod("create", String.class, FsPermission.class,<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      String.class, EnumSetWritable.class, boolean.class, short.class, long.class,<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      CryptoProtocolVersion[].class);<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>    return (instance, src, masked, clientName, flag, createParent, replication, blockSize,<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        supportedVersions) -&gt; {<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      return (HdfsFileStatus) createMethod.invoke(instance, src, masked, clientName, flag,<a name="line.255"></a>
+<span class="sourceLineNo">256</span>        createParent, replication, blockSize, supportedVersions);<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    };<a name="line.257"></a>
+<span class="sourceLineNo">258</span>  }<a name="line.258"></a>
 <span class="sourceLineNo">259</span><a name="line.259"></a>
-<span class="sourceLineNo">260</span>    private final DFSClient client;<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    public CancelOnClose(DFSClient client) {<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      this.client = client;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
-<span class="sourceLineNo">265</span><a name="line.265"></a>
-<span class="sourceLineNo">266</span>    @Override<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    public boolean progress() {<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    }<a name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  static {<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    try {<a name="line.273"></a>
-<span class="sourceLineNo">274</span>      LEASE_MANAGER = createLeaseManager();<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      FILE_CREATOR = createFileCreator();<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    } catch (Exception e) {<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.278"></a>
-<span class="sourceLineNo">279</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.279"></a>
-<span class="sourceLineNo">280</span>          "HBASE-16110 for more information.";<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      LOG.error(msg, e);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      throw new Error(msg, e);<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
-<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
-<span class="sourceLineNo">285</span><a name="line.285"></a>
-<span class="sourceLineNo">286</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.287"></a>
+<span class="sourceLineNo">260</span>  private static FileCreator createFileCreator() throws NoSuchMethodException {<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    try {<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      return createFileCreator3_3();<a name="line.262"></a>
+<span class="sourceLineNo">263</span>    } catch (NoSuchMethodException e) {<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 3.2 or below");<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    }<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>    try {<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      return createFileCreator3();<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    } catch (NoSuchMethodException e) {<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      LOG.debug("ClientProtocol::create wrong number of arguments, should be hadoop 2.x");<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    }<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    return createFileCreator2();<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  }<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>  // cancel the processing if DFSClient is already closed.<a name="line.275"></a>
+<span class="sourceLineNo">276</span>  static final class CancelOnClose implements CancelableProgressable {<a name="line.276"></a>
+<span class="sourceLineNo">277</span><a name="line.277"></a>
+<span class="sourceLineNo">278</span>    private final DFSClient client;<a name="line.278"></a>
+<span class="sourceLineNo">279</span><a name="line.279"></a>
+<span class="sourceLineNo">280</span>    public CancelOnClose(DFSClient client) {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      this.client = client;<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>    @Override<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    public boolean progress() {<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      return DFS_CLIENT_ADAPTOR.isClientRunning(client);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    }<a name="line.287"></a>
 <span class="sourceLineNo">288</span>  }<a name="line.288"></a>
 <span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.290"></a>
-<span class="sourceLineNo">291</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.291"></a>
-<span class="sourceLineNo">292</span>  }<a name="line.292"></a>
-<span class="sourceLineNo">293</span><a name="line.293"></a>
-<span class="sourceLineNo">294</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    return client.getConf().createChecksum(null);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>  }<a name="line.296"></a>
-<span class="sourceLineNo">297</span><a name="line.297"></a>
-<span class="sourceLineNo">298</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    List&lt;Integer&gt; flagList = ack.getFlagList();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    Integer headerFlag;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    if (flagList.isEmpty()) {<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      Status reply = ack.getReply(0);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      headerFlag = PipelineAck.combineHeader(ECN.DISABLED, reply);<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    } else {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      headerFlag = flagList.get(0);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    return PipelineAck.getStatusFromHeader(headerFlag);<a name="line.307"></a>
-<span class="sourceLineNo">308</span>  }<a name="line.308"></a>
-<span class="sourceLineNo">309</span><a name="line.309"></a>
-<span class="sourceLineNo">310</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      new ProtobufVarint32FrameDecoder(),<a name="line.313"></a>
-<span class="sourceLineNo">314</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.315"></a>
-<span class="sourceLineNo">316</span><a name="line.316"></a>
-<span class="sourceLineNo">317</span>        @Override<a name="line.317"></a>
-<span class="sourceLineNo">318</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.318"></a>
-<span class="sourceLineNo">319</span>            throws Exception {<a name="line.319"></a>
-<span class="sourceLineNo">320</span>          Status pipelineStatus = resp.getStatus();<a name="line.320"></a>
-<span class="sourceLineNo">321</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.321"></a>
-<span class="sourceLineNo">322</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.322"></a>
-<span class="sourceLineNo">323</span>          }<a name="line.323"></a>
-<span class="sourceLineNo">324</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.324"></a>
-<span class="sourceLineNo">325</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.325"></a>
-<span class="sourceLineNo">326</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.327"></a>
-<span class="sourceLineNo">328</span>                  resp.getMessage() + ", " + logInfo);<a name="line.328"></a>
-<span class="sourceLineNo">329</span>            } else {<a name="line.329"></a>
-<span class="sourceLineNo">330</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.330"></a>
-<span class="sourceLineNo">331</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>            }<a name="line.332"></a>
-<span class="sourceLineNo">333</span>          }<a name="line.333"></a>
-<span class="sourceLineNo">334</span>          // success<a name="line.334"></a>
-<span class="sourceLineNo">335</span>          ChannelPipeline p = ctx.pipeline();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.336"></a>
-<span class="sourceLineNo">337</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.337"></a>
-<span class="sourceLineNo">338</span>            // of pipeline.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>            if (handler instanceof IdleStateHandler) {<a name="line.339"></a>
-<span class="sourceLineNo">340</span>              break;<a name="line.340"></a>
-<span class="sourceLineNo">341</span>            }<a name="line.341"></a>
-<span class="sourceLineNo">342</span>          }<a name="line.342"></a>
-<span class="sourceLineNo">343</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.343"></a>
-<span class="sourceLineNo">344</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>          ctx.channel().config().setAutoRead(false);<a name="line.345"></a>
-<span class="sourceLineNo">346</span>          promise.trySuccess(ctx.channel());<a name="line.346"></a>
-<span class="sourceLineNo">347</span>        }<a name="line.347"></a>
-<span class="sourceLineNo">348</span><a name="line.348"></a>
-<span class="sourceLineNo">349</span>        @Override<a name="line.349"></a>
-<span class="sourceLineNo">350</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.351"></a>
-<span class="sourceLineNo">352</span>        }<a name="line.352"></a>
-<span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>        @Override<a name="line.354"></a>
-<span class="sourceLineNo">355</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.355"></a>
-<span class="sourceLineNo">356</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.356"></a>
-<span class="sourceLineNo">357</span>            promise<a name="line.357"></a>
-<span class="sourceLineNo">358</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.358"></a>
-<span class="sourceLineNo">359</span>          } else {<a name="line.359"></a>
-<span class="sourceLineNo">360</span>            super.userEventTriggered(ctx, evt);<a name="line.360"></a>
-<span class="sourceLineNo">361</span>          }<a name="line.361"></a>
-<span class="sourceLineNo">362</span>        }<a name="line.362"></a>
-<span class="sourceLineNo">363</span><a name="line.363"></a>
-<span class="sourceLineNo">364</span>        @Override<a name="line.364"></a>
-<span class="sourceLineNo">365</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.365"></a>
-<span class="sourceLineNo">366</span>          promise.tryFailure(cause);<a name="line.366"></a>
-<span class="sourceLineNo">367</span>        }<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      });<a name="line.368"></a>
-<span class="sourceLineNo">369</span>  }<a name="line.369"></a>
-<span class="sourceLineNo">370</span><a name="line.370"></a>
-<span class="sourceLineNo">371</span>  private static void requestWriteBlock(Channel channel, StorageType storageType,<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    OpWriteBlockProto proto =<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    int protoLen = proto.getSerializedSize();<a name="line.375"></a>
-<span class="sourceLineNo">376</span>    ByteBuf buffer =<a name="line.376"></a>
-<span class="sourceLineNo">377</span>      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.377"></a>
-<span class="sourceLineNo">378</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    channel.writeAndFlush(buffer);<a name="line.381"></a>
-<span class="sourceLineNo">382</span>  }<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      StorageType storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      throws IOException {<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.388"></a>
-<span class="sourceLineNo">389</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.389"></a>
-<span class="sourceLineNo">390</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.390"></a>
-<span class="sourceLineNo">391</span><a name="line.391"></a>
-<span class="sourceLineNo">392</span>      @Override<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>        if (future.isSuccess()) {<a name="line.394"></a>
-<span class="sourceLineNo">395</span>          // setup response processing pipeline first, then send request.<a name="line.395"></a>
-<span class="sourceLineNo">396</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.396"></a>
-<span class="sourceLineNo">397</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.397"></a>
-<span class="sourceLineNo">398</span>        } else {<a name="line.398"></a>
-<span class="sourceLineNo">399</span>          promise.tryFailure(future.cause());<a name="line.399"></a>
-<span class="sourceLineNo">400</span>        }<a name="line.400"></a>
-<span class="sourceLineNo">401</span>      }<a name="line.401"></a>
-<span class="sourceLineNo">402</span>    });<a name="line.402"></a>
-<span class="sourceLineNo">403</span>  }<a name="line.403"></a>
-<span class="sourceLineNo">404</span><a name="line.404"></a>
-<span class="sourceLineNo">405</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.407"></a>
-<span class="sourceLineNo">408</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.408"></a>
-<span class="sourceLineNo">409</span>    StorageType[] storageTypes = locatedBlock.getStorageTypes();<a name="line.409"></a>
-<span class="sourceLineNo">410</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    boolean connectToDnViaHostname =<a name="line.411"></a>
-<span class="sourceLineNo">412</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.412"></a>
-<span class="sourceLineNo">413</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.413"></a>
-<span class="sourceLineNo">414</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.414"></a>
-<span class="sourceLineNo">415</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PBHelperClient.convert(blockCopy))<a name="line.417"></a>
-<span class="sourceLineNo">418</span>        .setToken(PBHelperClient.convert(locatedBlock.getBlockToken())))<a name="line.418"></a>
-<span class="sourceLineNo">419</span>      .setClientName(clientName).build();<a name="line.419"></a>
-<span class="sourceLineNo">420</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.420"></a>
-<span class="sourceLineNo">421</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.421"></a>
-<span class="sourceLineNo">422</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.423"></a>
-<span class="sourceLineNo">424</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.424"></a>
-<span class="sourceLineNo">425</span>        .setRequestedChecksum(checksumProto)<a name="line.425"></a>
-<span class="sourceLineNo">426</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      StorageType storageType = storageTypes[i];<a name="line.430"></a>
-<span class="sourceLineNo">431</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      futureList.add(promise);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.434"></a>
-<span class="sourceLineNo">435</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>            @Override<a name="line.437"></a>
-<span class="sourceLineNo">438</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.438"></a>
-<span class="sourceLineNo">439</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.439"></a>
-<span class="sourceLineNo">440</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.440"></a>
-<span class="sourceLineNo">441</span>              // a null handler.<a name="line.441"></a>
-<span class="sourceLineNo">442</span>            }<a name="line.442"></a>
-<span class="sourceLineNo">443</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.443"></a>
-<span class="sourceLineNo">444</span><a name="line.444"></a>
-<span class="sourceLineNo">445</span>            @Override<a name="line.445"></a>
-<span class="sourceLineNo">446</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.446"></a>
-<span class="sourceLineNo">447</span>              if (future.isSuccess()) {<a name="line.447"></a>
-<span class="sourceLineNo">448</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.448"></a>
-<span class="sourceLineNo">449</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.449"></a>
-<span class="sourceLineNo">450</span>              } else {<a name="line.450"></a>
-<span class="sourceLineNo">451</span>                promise.tryFailure(future.cause());<a name="line.451"></a>
-<span class="sourceLineNo">452</span>              }<a name="line.452"></a>
-<span class="sourceLineNo">453</span>            }<a name="line.453"></a>
-<span class="sourceLineNo">454</span>          });<a name="line.454"></a>
-<span class="sourceLineNo">455</span>    }<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    return futureList;<a name="line.456"></a>
-<span class="sourceLineNo">457</span>  }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  public static class NameNodeException extends IOException {<a name="line.462"></a>
-<span class="sourceLineNo">463</span><a name="line.463"></a>
-<span class="sourceLineNo">464</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>    public NameNodeException(Throwable cause) {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      super(cause);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    }<a name="line.468"></a>
-<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
-<span class="sourceLineNo">470</span><a name="line.470"></a>
-<span class="sourceLineNo">471</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.471"></a>
-<span class="sourceLineNo">472</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.472"></a>
-<span class="sourceLineNo">473</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>    Configuration conf = dfs.getConf();<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.475"></a>
-<span class="sourceLineNo">476</span>    DFSClient client = dfs.getClient();<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    String clientName = client.getClientName();<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    ClientProtocol namenode = client.getNamenode();<a name="line.478"></a>
-<span class="sourceLineNo">479</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.479"></a>
-<span class="sourceLineNo">480</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.481"></a>
-<span class="sourceLineNo">482</span>    for (int retry = 0;; retry++) {<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      HdfsFileStatus stat;<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      try {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.485"></a>
-<span class="sourceLineNo">486</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.486"></a>
-<span class="sourceLineNo">487</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.487"></a>
-<span class="sourceLineNo">488</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.488"></a>
-<span class="sourceLineNo">489</span>      } catch (Exception e) {<a name="line.489"></a>
-<span class="sourceLineNo">490</span>        if (e instanceof RemoteException) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>          throw (RemoteException) e;<a name="line.491"></a>
-<span class="sourceLineNo">492</span>        } else {<a name="line.492"></a>
-<span class="sourceLineNo">493</span>          throw new NameNodeException(e);<a name="line.493"></a>
-<span class="sourceLineNo">494</span>        }<a name="line.494"></a>
-<span class="sourceLineNo">495</span>      }<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      beginFileLease(client, stat.getFileId());<a name="line.496"></a>
-<span class="sourceLineNo">497</span>      boolean succ = false;<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      LocatedBlock locatedBlock = null;<a name="line.498"></a>
-<span class="sourceLineNo">499</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      try {<a name="line.500"></a>
-<span class="sourceLineNo">501</span>        DataChecksum summer = createChecksum(client);<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        locatedBlock = namenode.addBlock(src, client.getClientName(), null, excludesNodes,<a name="line.502"></a>
-<span class="sourceLineNo">503</span>          stat.getFileId(), null, null);<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.505"></a>
-<span class="sourceLineNo">506</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          try {<a name="line.508"></a>
-<span class="sourceLineNo">509</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          } catch (Exception e) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>            // exclude the broken DN next time<a name="line.511"></a>
-<span class="sourceLineNo">512</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.512"></a>
-<span class="sourceLineNo">513</span>            throw e;<a name="line.513"></a>
-<span class="sourceLineNo">514</span>          }<a name="line.514"></a>
-<span class="sourceLineNo">515</span>        }<a name="line.515"></a>
-<span class="sourceLineNo">516</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.516"></a>
-<span class="sourceLineNo">517</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.517"></a>
-<span class="sourceLineNo">518</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.518"></a>
-<span class="sourceLineNo">519</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.519"></a>
-<span class="sourceLineNo">520</span>        succ = true;<a name="line.520"></a>
-<span class="sourceLineNo">521</span>        return output;<a name="line.521"></a>
-<span class="sourceLineNo">522</span>      } catch (RemoteException e) {<a name="line.522"></a>
-<span class="sourceLineNo">523</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.523"></a>
-<span class="sourceLineNo">524</span>        if (shouldRetryCreate(e)) {<a name="line.524"></a>
-<span class="sourceLineNo">525</span>          if (retry &gt;= createMaxRetries) {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>            throw e.unwrapRemoteException();<a name="line.526"></a>
-<span class="sourceLineNo">527</span>          }<a name="line.527"></a>
-<span class="sourceLineNo">528</span>        } else {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>          throw e.unwrapRemoteException();<a name="line.529"></a>
-<span class="sourceLineNo">530</span>        }<a name="line.530"></a>
-<span class="sourceLineNo">531</span>      } catch (IOException e) {<a name="line.531"></a>
-<span class="sourceLineNo">532</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.532"></a>
-<span class="sourceLineNo">533</span>        if (retry &gt;= createMaxRetries) {<a name="line.533"></a>
-<span class="sourceLineNo">534</span>          throw e;<a name="line.534"></a>
-<span class="sourceLineNo">535</span>        }<a name="line.535"></a>
-<span class="sourceLineNo">536</span>        // overwrite the old broken file.<a name="line.536"></a>
-<span class="sourceLineNo">537</span>        overwrite = true;<a name="line.537"></a>
-<span class="sourceLineNo">538</span>        try {<a name="line.538"></a>
-<span class="sourceLineNo">539</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.539"></a>
-<span class="sourceLineNo">540</span>        } catch (InterruptedException ie) {<a name="line.540"></a>
-<span class="sourceLineNo">541</span>          throw new InterruptedIOException();<a name="line.541"></a>
-<span class="sourceLineNo">542</span>        }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>      } finally {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>        if (!succ) {<a name="line.544"></a>
-<span class="sourceLineNo">545</span>          if (futureList != null) {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span><a name="line.548"></a>
-<span class="sourceLineNo">549</span>                @Override<a name="line.549"></a>
-<span class="sourceLineNo">550</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.550"></a>
-<span class="sourceLineNo">551</span>                  if (future.isSuccess()) {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>                    future.getNow().close();<a name="line.552"></a>
-<span class="sourceLineNo">553</span>                  }<a name="line.553"></a>
-<span class="sourceLineNo">554</span>                }<a name="line.554"></a>
-<span class="sourceLineNo">555</span>              });<a name="line.555"></a>
-<span class="sourceLineNo">556</span>            }<a name="line.556"></a>
-<span class="sourceLineNo">557</span>          }<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          endFileLease(client, stat.getFileId());<a name="line.558"></a>
-<span class="sourceLineNo">559</span>        }<a name="line.559"></a>
-<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
-<span class="sourceLineNo">562</span>  }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>  /**<a name="line.564"></a>
-<span class="sourceLineNo">565</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.565"></a>
-<span class="sourceLineNo">566</span>   * inside an {@link EventLoop}.<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   */<a name="line.567"></a>
-<span class="sourceLineNo">568</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.568"></a>
-<span class="sourceLineNo">569</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.569"></a>
-<span class="sourceLineNo">570</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.571"></a>
-<span class="sourceLineNo">572</span><a name="line.572"></a>
-<span class="sourceLineNo">573</span>      @Override<a name="line.573"></a>
-<span class="sourceLineNo">574</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.574"></a>
-<span class="sourceLineNo">575</span>          throws IOException, UnresolvedLinkException {<a name="line.575"></a>
-<span class="sourceLineNo">576</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.576"></a>
-<span class="sourceLineNo">577</span>          blockSize, eventLoopGroup, channelClass);<a name="line.577"></a>
+<span class="sourceLineNo">290</span>  static {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    try {<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      LEASE_MANAGER = createLeaseManager();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      DFS_CLIENT_ADAPTOR = createDFSClientAdaptor();<a name="line.293"></a>
+<span class="sourceLineNo">294</span>      FILE_CREATOR = createFileCreator();<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    } catch (Exception e) {<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      String msg = "Couldn't properly initialize access to HDFS internals. Please " +<a name="line.296"></a>
+<span class="sourceLineNo">297</span>          "update your WAL Provider to not make use of the 'asyncfs' provider. See " +<a name="line.297"></a>
+<span class="sourceLineNo">298</span>          "HBASE-16110 for more information.";<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      LOG.error(msg, e);<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      throw new Error(msg, e);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    }<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
+<span class="sourceLineNo">303</span><a name="line.303"></a>
+<span class="sourceLineNo">304</span>  static void beginFileLease(DFSClient client, long inodeId) {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    LEASE_MANAGER.begin(client, inodeId);<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  }<a name="line.306"></a>
+<span class="sourceLineNo">307</span><a name="line.307"></a>
+<span class="sourceLineNo">308</span>  static void endFileLease(DFSClient client, long inodeId) {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    LEASE_MANAGER.end(client, inodeId);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  static DataChecksum createChecksum(DFSClient client) {<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    return client.getConf().createChecksum(null);<a name="line.313"></a>
+<span class="sourceLineNo">314</span>  }<a name="line.314"></a>
+<span class="sourceLineNo">315</span><a name="line.315"></a>
+<span class="sourceLineNo">316</span>  static Status getStatus(PipelineAckProto ack) {<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    List&lt;Integer&gt; flagList = ack.getFlagList();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    Integer headerFlag;<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    if (flagList.isEmpty()) {<a name="line.319"></a>
+<span class="sourceLineNo">320</span>      Status reply = ack.getReply(0);<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      headerFlag = PipelineAck.combineHeader(ECN.DISABLED, reply);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    } else {<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      headerFlag = flagList.get(0);<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    }<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    return PipelineAck.getStatusFromHeader(headerFlag);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  private static void processWriteBlockResponse(Channel channel, DatanodeInfo dnInfo,<a name="line.328"></a>
+<span class="sourceLineNo">329</span>      Promise&lt;Channel&gt; promise, int timeoutMs) {<a name="line.329"></a>
+<span class="sourceLineNo">330</span>    channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),<a name="line.330"></a>
+<span class="sourceLineNo">331</span>      new ProtobufVarint32FrameDecoder(),<a name="line.331"></a>
+<span class="sourceLineNo">332</span>      new ProtobufDecoder(BlockOpResponseProto.getDefaultInstance()),<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      new SimpleChannelInboundHandler&lt;BlockOpResponseProto&gt;() {<a name="line.333"></a>
+<span class="sourceLineNo">334</span><a name="line.334"></a>
+<span class="sourceLineNo">335</span>        @Override<a name="line.335"></a>
+<span class="sourceLineNo">336</span>        protected void channelRead0(ChannelHandlerContext ctx, BlockOpResponseProto resp)<a name="line.336"></a>
+<span class="sourceLineNo">337</span>            throws Exception {<a name="line.337"></a>
+<span class="sourceLineNo">338</span>          Status pipelineStatus = resp.getStatus();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>          if (PipelineAck.isRestartOOBStatus(pipelineStatus)) {<a name="line.339"></a>
+<span class="sourceLineNo">340</span>            throw new IOException("datanode " + dnInfo + " is restarting");<a name="line.340"></a>
+<span class="sourceLineNo">341</span>          }<a name="line.341"></a>
+<span class="sourceLineNo">342</span>          String logInfo = "ack with firstBadLink as " + resp.getFirstBadLink();<a name="line.342"></a>
+<span class="sourceLineNo">343</span>          if (resp.getStatus() != Status.SUCCESS) {<a name="line.343"></a>
+<span class="sourceLineNo">344</span>            if (resp.getStatus() == Status.ERROR_ACCESS_TOKEN) {<a name="line.344"></a>
+<span class="sourceLineNo">345</span>              throw new InvalidBlockTokenException("Got access token error" + ", status message " +<a name="line.345"></a>
+<span class="sourceLineNo">346</span>                  resp.getMessage() + ", " + logInfo);<a name="line.346"></a>
+<span class="sourceLineNo">347</span>            } else {<a name="line.347"></a>
+<span class="sourceLineNo">348</span>              throw new IOException("Got error" + ", status=" + resp.getStatus().name() +<a name="line.348"></a>
+<span class="sourceLineNo">349</span>                  ", status message " + resp.getMessage() + ", " + logInfo);<a name="line.349"></a>
+<span class="sourceLineNo">350</span>            }<a name="line.350"></a>
+<span class="sourceLineNo">351</span>          }<a name="line.351"></a>
+<span class="sourceLineNo">352</span>          // success<a name="line.352"></a>
+<span class="sourceLineNo">353</span>          ChannelPipeline p = ctx.pipeline();<a name="line.353"></a>
+<span class="sourceLineNo">354</span>          for (ChannelHandler handler; (handler = p.removeLast()) != null;) {<a name="line.354"></a>
+<span class="sourceLineNo">355</span>            // do not remove all handlers because we may have wrap or unwrap handlers at the header<a name="line.355"></a>
+<span class="sourceLineNo">356</span>            // of pipeline.<a name="line.356"></a>
+<span class="sourceLineNo">357</span>            if (handler instanceof IdleStateHandler) {<a name="line.357"></a>
+<span class="sourceLineNo">358</span>              break;<a name="line.358"></a>
+<span class="sourceLineNo">359</span>            }<a name="line.359"></a>
+<span class="sourceLineNo">360</span>          }<a name="line.360"></a>
+<span class="sourceLineNo">361</span>          // Disable auto read here. Enable it after we setup the streaming pipeline in<a name="line.361"></a>
+<span class="sourceLineNo">362</span>          // FanOutOneBLockAsyncDFSOutput.<a name="line.362"></a>
+<span class="sourceLineNo">363</span>          ctx.channel().config().setAutoRead(false);<a name="line.363"></a>
+<span class="sourceLineNo">364</span>          promise.trySuccess(ctx.channel());<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        }<a name="line.365"></a>
+<span class="sourceLineNo">366</span><a name="line.366"></a>
+<span class="sourceLineNo">367</span>        @Override<a name="line.367"></a>
+<span class="sourceLineNo">368</span>        public void channelInactive(ChannelHandlerContext ctx) throws Exception {<a name="line.368"></a>
+<span class="sourceLineNo">369</span>          promise.tryFailure(new IOException("connection to " + dnInfo + " is closed"));<a name="line.369"></a>
+<span class="sourceLineNo">370</span>        }<a name="line.370"></a>
+<span class="sourceLineNo">371</span><a name="line.371"></a>
+<span class="sourceLineNo">372</span>        @Override<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {<a name="line.373"></a>
+<span class="sourceLineNo">374</span>          if (evt instanceof IdleStateEvent &amp;&amp; ((IdleStateEvent) evt).state() == READER_IDLE) {<a name="line.374"></a>
+<span class="sourceLineNo">375</span>            promise<a name="line.375"></a>
+<span class="sourceLineNo">376</span>                .tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));<a name="line.376"></a>
+<span class="sourceLineNo">377</span>          } else {<a name="line.377"></a>
+<span class="sourceLineNo">378</span>            super.userEventTriggered(ctx, evt);<a name="line.378"></a>
+<span class="sourceLineNo">379</span>          }<a name="line.379"></a>
+<span class="sourceLineNo">380</span>        }<a name="line.380"></a>
+<span class="sourceLineNo">381</span><a name="line.381"></a>
+<span class="sourceLineNo">382</span>        @Override<a name="line.382"></a>
+<span class="sourceLineNo">383</span>        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {<a name="line.383"></a>
+<span class="sourceLineNo">384</span>          promise.tryFailure(cause);<a name="line.384"></a>
+<span class="sourceLineNo">385</span>        }<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      });<a name="line.386"></a>
+<span class="sourceLineNo">387</span>  }<a name="line.387"></a>
+<span class="sourceLineNo">388</span><a name="line.388"></a>
+<span class="sourceLineNo">389</span>  private static void requestWriteBlock(Channel channel, StorageType storageType,<a name="line.389"></a>
+<span class="sourceLineNo">390</span>      OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    OpWriteBlockProto proto =<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();<a name="line.392"></a>
+<span class="sourceLineNo">393</span>    int protoLen = proto.getSerializedSize();<a name="line.393"></a>
+<span class="sourceLineNo">394</span>    ByteBuf buffer =<a name="line.394"></a>
+<span class="sourceLineNo">395</span>      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);<a name="line.396"></a>
+<span class="sourceLineNo">397</span>    buffer.writeByte(Op.WRITE_BLOCK.code);<a name="line.397"></a>
+<span class="sourceLineNo">398</span>    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));<a name="line.398"></a>
+<span class="sourceLineNo">399</span>    channel.writeAndFlush(buffer);<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  }<a name="line.400"></a>
+<span class="sourceLineNo">401</span><a name="line.401"></a>
+<span class="sourceLineNo">402</span>  private static void initialize(Configuration conf, Channel channel, DatanodeInfo dnInfo,<a name="line.402"></a>
+<span class="sourceLineNo">403</span>      StorageType storageType, OpWriteBlockProto.Builder writeBlockProtoBuilder, int timeoutMs,<a name="line.403"></a>
+<span class="sourceLineNo">404</span>      DFSClient client, Token&lt;BlockTokenIdentifier&gt; accessToken, Promise&lt;Channel&gt; promise)<a name="line.404"></a>
+<span class="sourceLineNo">405</span>      throws IOException {<a name="line.405"></a>
+<span class="sourceLineNo">406</span>    Promise&lt;Void&gt; saslPromise = channel.eventLoop().newPromise();<a name="line.406"></a>
+<span class="sourceLineNo">407</span>    trySaslNegotiate(conf, channel, dnInfo, timeoutMs, client, accessToken, saslPromise);<a name="line.407"></a>
+<span class="sourceLineNo">408</span>    saslPromise.addListener(new FutureListener&lt;Void&gt;() {<a name="line.408"></a>
+<span class="sourceLineNo">409</span><a name="line.409"></a>
+<span class="sourceLineNo">410</span>      @Override<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      public void operationComplete(Future&lt;Void&gt; future) throws Exception {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>        if (future.isSuccess()) {<a name="line.412"></a>
+<span class="sourceLineNo">413</span>          // setup response processing pipeline first, then send request.<a name="line.413"></a>
+<span class="sourceLineNo">414</span>          processWriteBlockResponse(channel, dnInfo, promise, timeoutMs);<a name="line.414"></a>
+<span class="sourceLineNo">415</span>          requestWriteBlock(channel, storageType, writeBlockProtoBuilder);<a name="line.415"></a>
+<span class="sourceLineNo">416</span>        } else {<a name="line.416"></a>
+<span class="sourceLineNo">417</span>          promise.tryFailure(future.cause());<a name="line.417"></a>
+<span class="sourceLineNo">418</span>        }<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      }<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    });<a name="line.420"></a>
+<span class="sourceLineNo">421</span>  }<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>  private static List&lt;Future&lt;Channel&gt;&gt; connectToDataNodes(Configuration conf, DFSClient client,<a name="line.423"></a>
+<span class="sourceLineNo">424</span>      String clientName, LocatedBlock locatedBlock, long maxBytesRcvd, long latestGS,<a name="line.424"></a>
+<span class="sourceLineNo">425</span>      BlockConstructionStage stage, DataChecksum summer, EventLoopGroup eventLoopGroup,<a name="line.425"></a>
+<span class="sourceLineNo">426</span>      Class&lt;? extends Channel&gt; channelClass) {<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    StorageType[] storageTypes = locatedBlock.getStorageTypes();<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    DatanodeInfo[] datanodeInfos = locatedBlock.getLocations();<a name="line.428"></a>
+<span class="sourceLineNo">429</span>    boolean connectToDnViaHostname =<a name="line.429"></a>
+<span class="sourceLineNo">430</span>        conf.getBoolean(DFS_CLIENT_USE_DN_HOSTNAME, DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);<a name="line.430"></a>
+<span class="sourceLineNo">431</span>    int timeoutMs = conf.getInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT);<a name="line.431"></a>
+<span class="sourceLineNo">432</span>    ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock());<a name="line.432"></a>
+<span class="sourceLineNo">433</span>    blockCopy.setNumBytes(locatedBlock.getBlockSize());<a name="line.433"></a>
+<span class="sourceLineNo">434</span>    ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()<a name="line.434"></a>
+<span class="sourceLineNo">435</span>      .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PBHelperClient.convert(blockCopy))<a name="line.435"></a>
+<span class="sourceLineNo">436</span>        .setToken(PBHelperClient.convert(locatedBlock.getBlockToken())))<a name="line.436"></a>
+<span class="sourceLineNo">437</span>      .setClientName(clientName).build();<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    ChecksumProto checksumProto = DataTransferProtoUtil.toProto(summer);<a name="line.438"></a>
+<span class="sourceLineNo">439</span>    OpWriteBlockProto.Builder writeBlockProtoBuilder = OpWriteBlockProto.newBuilder()<a name="line.439"></a>
+<span class="sourceLineNo">440</span>        .setHeader(header).setStage(OpWriteBlockProto.BlockConstructionStage.valueOf(stage.name()))<a name="line.440"></a>
+<span class="sourceLineNo">441</span>        .setPipelineSize(1).setMinBytesRcvd(locatedBlock.getBlock().getNumBytes())<a name="line.441"></a>
+<span class="sourceLineNo">442</span>        .setMaxBytesRcvd(maxBytesRcvd).setLatestGenerationStamp(latestGS)<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        .setRequestedChecksum(checksumProto)<a name="line.443"></a>
+<span class="sourceLineNo">444</span>        .setCachingStrategy(CachingStrategyProto.newBuilder().setDropBehind(true).build());<a name="line.444"></a>
+<span class="sourceLineNo">445</span>    List&lt;Future&lt;Channel&gt;&gt; futureList = new ArrayList&lt;&gt;(datanodeInfos.length);<a name="line.445"></a>
+<span class="sourceLineNo">446</span>    for (int i = 0; i &lt; datanodeInfos.length; i++) {<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      DatanodeInfo dnInfo = datanodeInfos[i];<a name="line.447"></a>
+<span class="sourceLineNo">448</span>      StorageType storageType = storageTypes[i];<a name="line.448"></a>
+<span class="sourceLineNo">449</span>      Promise&lt;Channel&gt; promise = eventLoopGroup.next().newPromise();<a name="line.449"></a>
+<span class="sourceLineNo">450</span>      futureList.add(promise);<a name="line.450"></a>
+<span class="sourceLineNo">451</span>      String dnAddr = dnInfo.getXferAddr(connectToDnViaHostname);<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      new Bootstrap().group(eventLoopGroup).channel(channelClass)<a name="line.452"></a>
+<span class="sourceLineNo">453</span>          .option(CONNECT_TIMEOUT_MILLIS, timeoutMs).handler(new ChannelInitializer&lt;Channel&gt;() {<a name="line.453"></a>
+<span class="sourceLineNo">454</span><a name="line.454"></a>
+<span class="sourceLineNo">455</span>            @Override<a name="line.455"></a>
+<span class="sourceLineNo">456</span>            protected void initChannel(Channel ch) throws Exception {<a name="line.456"></a>
+<span class="sourceLineNo">457</span>              // we need to get the remote address of the channel so we can only move on after<a name="line.457"></a>
+<span class="sourceLineNo">458</span>              // channel connected. Leave an empty implementation here because netty does not allow<a name="line.458"></a>
+<span class="sourceLineNo">459</span>              // a null handler.<a name="line.459"></a>
+<span class="sourceLineNo">460</span>            }<a name="line.460"></a>
+<span class="sourceLineNo">461</span>          }).connect(NetUtils.createSocketAddr(dnAddr)).addListener(new ChannelFutureListener() {<a name="line.461"></a>
+<span class="sourceLineNo">462</span><a name="line.462"></a>
+<span class="sourceLineNo">463</span>            @Override<a name="line.463"></a>
+<span class="sourceLineNo">464</span>            public void operationComplete(ChannelFuture future) throws Exception {<a name="line.464"></a>
+<span class="sourceLineNo">465</span>              if (future.isSuccess()) {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>                initialize(conf, future.channel(), dnInfo, storageType, writeBlockProtoBuilder,<a name="line.466"></a>
+<span class="sourceLineNo">467</span>                  timeoutMs, client, locatedBlock.getBlockToken(), promise);<a name="line.467"></a>
+<span class="sourceLineNo">468</span>              } else {<a name="line.468"></a>
+<span class="sourceLineNo">469</span>                promise.tryFailure(future.cause());<a name="line.469"></a>
+<span class="sourceLineNo">470</span>              }<a name="line.470"></a>
+<span class="sourceLineNo">471</span>            }<a name="line.471"></a>
+<span class="sourceLineNo">472</span>          });<a name="line.472"></a>
+<span class="sourceLineNo">473</span>    }<a name="line.473"></a>
+<span class="sourceLineNo">474</span>    return futureList;<a name="line.474"></a>
+<span class="sourceLineNo">475</span>  }<a name="line.475"></a>
+<span class="sourceLineNo">476</span><a name="line.476"></a>
+<span class="sourceLineNo">477</span>  /**<a name="line.477"></a>
+<span class="sourceLineNo">478</span>   * Exception other than RemoteException thrown when calling create on namenode<a name="line.478"></a>
+<span class="sourceLineNo">479</span>   */<a name="line.479"></a>
+<span class="sourceLineNo">480</span>  public static class NameNodeException extends IOException {<a name="line.480"></a>
+<span class="sourceLineNo">481</span><a name="line.481"></a>
+<span class="sourceLineNo">482</span>    private static final long serialVersionUID = 3143237406477095390L;<a name="line.482"></a>
+<span class="sourceLineNo">483</span><a name="line.483"></a>
+<span class="sourceLineNo">484</span>    public NameNodeException(Throwable cause) {<a name="line.484"></a>
+<span class="sourceLineNo">485</span>      super(cause);<a name="line.485"></a>
+<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
+<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
+<span class="sourceLineNo">488</span><a name="line.488"></a>
+<span class="sourceLineNo">489</span>  private static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, String src,<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.490"></a>
+<span class="sourceLineNo">491</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.491"></a>
+<span class="sourceLineNo">492</span>    Configuration conf = dfs.getConf();<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    FSUtils fsUtils = FSUtils.getInstance(dfs, conf);<a name="line.493"></a>
+<span class="sourceLineNo">494</span>    DFSClient client = dfs.getClient();<a name="line.494"></a>
+<span class="sourceLineNo">495</span>    String clientName = client.getClientName();<a name="line.495"></a>
+<span class="sourceLineNo">496</span>    ClientProtocol namenode = client.getNamenode();<a name="line.496"></a>
+<span class="sourceLineNo">497</span>    int createMaxRetries = conf.getInt(ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES,<a name="line.497"></a>
+<span class="sourceLineNo">498</span>      DEFAULT_ASYNC_DFS_OUTPUT_CREATE_MAX_RETRIES);<a name="line.498"></a>
+<span class="sourceLineNo">499</span>    DatanodeInfo[] excludesNodes = EMPTY_DN_ARRAY;<a name="line.499"></a>
+<span class="sourceLineNo">500</span>    for (int retry = 0;; retry++) {<a name="line.500"></a>
+<span class="sourceLineNo">501</span>      HdfsFileStatus stat;<a name="line.501"></a>
+<span class="sourceLineNo">502</span>      try {<a name="line.502"></a>
+<span class="sourceLineNo">503</span>        stat = FILE_CREATOR.create(namenode, src,<a name="line.503"></a>
+<span class="sourceLineNo">504</span>          FsPermission.getFileDefault().applyUMask(FsPermission.getUMask(conf)), clientName,<a name="line.504"></a>
+<span class="sourceLineNo">505</span>          new EnumSetWritable&lt;&gt;(overwrite ? EnumSet.of(CREATE, OVERWRITE) : EnumSet.of(CREATE)),<a name="line.505"></a>
+<span class="sourceLineNo">506</span>          createParent, replication, blockSize, CryptoProtocolVersion.supported());<a name="line.506"></a>
+<span class="sourceLineNo">507</span>      } catch (Exception e) {<a name="line.507"></a>
+<span class="sourceLineNo">508</span>        if (e instanceof RemoteException) {<a name="line.508"></a>
+<span class="sourceLineNo">509</span>          throw (RemoteException) e;<a name="line.509"></a>
+<span class="sourceLineNo">510</span>        } else {<a name="line.510"></a>
+<span class="sourceLineNo">511</span>          throw new NameNodeException(e);<a name="line.511"></a>
+<span class="sourceLineNo">512</span>        }<a name="line.512"></a>
+<span class="sourceLineNo">513</span>      }<a name="line.513"></a>
+<span class="sourceLineNo">514</span>      beginFileLease(client, stat.getFileId());<a name="line.514"></a>
+<span class="sourceLineNo">515</span>      boolean succ = false;<a name="line.515"></a>
+<span class="sourceLineNo">516</span>      LocatedBlock locatedBlock = null;<a name="line.516"></a>
+<span class="sourceLineNo">517</span>      List&lt;Future&lt;Channel&gt;&gt; futureList = null;<a name="line.517"></a>
+<span class="sourceLineNo">518</span>      try {<a name="line.518"></a>
+<span class="sourceLineNo">519</span>        DataChecksum summer = createChecksum(client);<a name="line.519"></a>
+<span class="sourceLineNo">520</span>        locatedBlock = namenode.addBlock(src, client.getClientName(), null, excludesNodes,<a name="line.520"></a>
+<span class="sourceLineNo">521</span>          stat.getFileId(), null, null);<a name="line.521"></a>
+<span class="sourceLineNo">522</span>        List&lt;Channel&gt; datanodeList = new ArrayList&lt;&gt;();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>        futureList = connectToDataNodes(conf, client, clientName, locatedBlock, 0L, 0L,<a name="line.523"></a>
+<span class="sourceLineNo">524</span>          PIPELINE_SETUP_CREATE, summer, eventLoopGroup, channelClass);<a name="line.524"></a>
+<span class="sourceLineNo">525</span>        for (int i = 0, n = futureList.size(); i &lt; n; i++) {<a name="line.525"></a>
+<span class="sourceLineNo">526</span>          try {<a name="line.526"></a>
+<span class="sourceLineNo">527</span>            datanodeList.add(futureList.get(i).syncUninterruptibly().getNow());<a name="line.527"></a>
+<span class="sourceLineNo">528</span>          } catch (Exception e) {<a name="line.528"></a>
+<span class="sourceLineNo">529</span>            // exclude the broken DN next time<a name="line.529"></a>
+<span class="sourceLineNo">530</span>            excludesNodes = ArrayUtils.add(excludesNodes, locatedBlock.getLocations()[i]);<a name="line.530"></a>
+<span class="sourceLineNo">531</span>            throw e;<a name="line.531"></a>
+<span class="sourceLineNo">532</span>          }<a name="line.532"></a>
+<span class="sourceLineNo">533</span>        }<a name="line.533"></a>
+<span class="sourceLineNo">534</span>        Encryptor encryptor = createEncryptor(conf, stat, client);<a name="line.534"></a>
+<span class="sourceLineNo">535</span>        FanOutOneBlockAsyncDFSOutput output =<a name="line.535"></a>
+<span class="sourceLineNo">536</span>          new FanOutOneBlockAsyncDFSOutput(conf, fsUtils, dfs, client, namenode, clientName, src,<a name="line.536"></a>
+<span class="sourceLineNo">537</span>              stat.getFileId(), locatedBlock, encryptor, datanodeList, summer, ALLOC);<a name="line.537"></a>
+<span class="sourceLineNo">538</span>        succ = true;<a name="line.538"></a>
+<span class="sourceLineNo">539</span>        return output;<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      } catch (RemoteException e) {<a name="line.540"></a>
+<span class="sourceLineNo">541</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.541"></a>
+<span class="sourceLineNo">542</span>        if (shouldRetryCreate(e)) {<a name="line.542"></a>
+<span class="sourceLineNo">543</span>          if (retry &gt;= createMaxRetries) {<a name="line.543"></a>
+<span class="sourceLineNo">544</span>            throw e.unwrapRemoteException();<a name="line.544"></a>
+<span class="sourceLineNo">545</span>          }<a name="line.545"></a>
+<span class="sourceLineNo">546</span>        } else {<a name="line.546"></a>
+<span class="sourceLineNo">547</span>          throw e.unwrapRemoteException();<a name="line.547"></a>
+<span class="sourceLineNo">548</span>        }<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      } catch (IOException e) {<a name="line.549"></a>
+<span class="sourceLineNo">550</span>        LOG.warn("create fan-out dfs output {} failed, retry = {}", src, retry, e);<a name="line.550"></a>
+<span class="sourceLineNo">551</span>        if (retry &gt;= createMaxRetries) {<a name="line.551"></a>
+<span class="sourceLineNo">552</span>          throw e;<a name="line.552"></a>
+<span class="sourceLineNo">553</span>        }<a name="line.553"></a>
+<span class="sourceLineNo">554</span>        // overwrite the old broken file.<a name="line.554"></a>
+<span class="sourceLineNo">555</span>        overwrite = true;<a name="line.555"></a>
+<span class="sourceLineNo">556</span>        try {<a name="line.556"></a>
+<span class="sourceLineNo">557</span>          Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.557"></a>
+<span class="sourceLineNo">558</span>        } catch (InterruptedException ie) {<a name="line.558"></a>
+<span class="sourceLineNo">559</span>          throw new InterruptedIOException();<a name="line.559"></a>
+<span class="sourceLineNo">560</span>        }<a name="line.560"></a>
+<span class="sourceLineNo">561</span>      } finally {<a name="line.561"></a>
+<span class="sourceLineNo">562</span>        if (!succ) {<a name="line.562"></a>
+<span class="sourceLineNo">563</span>          if (futureList != null) {<a name="line.563"></a>
+<span class="sourceLineNo">564</span>            for (Future&lt;Channel&gt; f : futureList) {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>              f.addListener(new FutureListener&lt;Channel&gt;() {<a name="line.565"></a>
+<span class="sourceLineNo">566</span><a name="line.566"></a>
+<span class="sourceLineNo">567</span>                @Override<a name="line.567"></a>
+<span class="sourceLineNo">568</span>                public void operationComplete(Future&lt;Channel&gt; future) throws Exception {<a name="line.568"></a>
+<span class="sourceLineNo">569</span>                  if (future.isSuccess()) {<a name="line.569"></a>
+<span class="sourceLineNo">570</span>                    future.getNow().close();<a name="line.570"></a>
+<span class="sourceLineNo">571</span>                  }<a name="line.571"></a>
+<span class="sourceLineNo">572</span>                }<a name="line.572"></a>
+<span class="sourceLineNo">573</span>              });<a name="line.573"></a>
+<span class="sourceLineNo">574</span>            }<a name="line.574"></a>
+<span class="sourceLineNo">575</span>          }<a name="line.575"></a>
+<span class="sourceLineNo">576</span>          endFileLease(client, stat.getFileId());<a name="line.576"></a>
+<span class="sourceLineNo">577</span>        }<a name="line.577"></a>
 <span class="sourceLineNo">578</span>      }<a name="line.578"></a>
-<span class="sourceLineNo">579</span><a name="line.579"></a>
-<span class="sourceLineNo">580</span>      @Override<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>        throw new UnsupportedOperationException();<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      }<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }.resolve(dfs, f);<a name="line.584"></a>
-<span class="sourceLineNo">585</span>  }<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.587"></a>
-<span class="sourceLineNo">588</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.588"></a>
-<span class="sourceLineNo">589</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.589"></a>
-<span class="sourceLineNo">590</span>    // DFSOutputStream.newStreamForCreate.<a name="line.590"></a>
-<span class="sourceLineNo">591</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.591"></a>
-<span class="sourceLineNo">592</span>  }<a name="line.592"></a>
-<span class="sourceLineNo">593</span><a name="line.593"></a>
-<span class="sourceLineNo">594</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      ExtendedBlock block, long fileId) {<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    for (int retry = 0;; retry++) {<a name="line.596"></a>
-<span class="sourceLineNo">597</span>      try {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>          endFileLease(client, fileId);<a name="line.599"></a>
-<span class="sourceLineNo">600</span>          return;<a name="line.600"></a>
-<span class="sourceLineNo">601</span>        } else {<a name="line.601"></a>
-<span class="sourceLineNo">602</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.602"></a>
-<span class="sourceLineNo">603</span>        }<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      } catch (RemoteException e) {<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        IOException ioe = e.unwrapRemoteException();<a name="line.605"></a>
-<span class="sourceLineNo">606</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>          return;<a name="line.608"></a>
-<span class="sourceLineNo">609</span>        } else {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.610"></a>
-<span class="sourceLineNo">611</span>        }<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      } catch (Exception e) {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      }<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      sleepIgnoreInterrupt(retry);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    }<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  }<a name="line.617"></a>
-<span class="sourceLineNo">618</span><a name="line.618"></a>
-<span class="sourceLineNo">619</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.619"></a>
-<span class="sourceLineNo">620</span>    try {<a name="line.620"></a>
-<span class="sourceLineNo">621</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    } catch (InterruptedException e) {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>    }<a name="line.623"></a>
-<span class="sourceLineNo">624</span>  }<a name="line.624"></a>
-<span class="sourceLineNo">625</span>}<a name="line.625"></a>
+<span class="sourceLineNo">579</span>    }<a name="line.579"></a>
+<span class="sourceLineNo">580</span>  }<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>  /**<a name="line.582"></a>
+<span class="sourceLineNo">583</span>   * Create a {@link FanOutOneBlockAsyncDFSOutput}. The method maybe blocked so do not call it<a name="line.583"></a>
+<span class="sourceLineNo">584</span>   * inside an {@link EventLoop}.<a name="line.584"></a>
+<span class="sourceLineNo">585</span>   */<a name="line.585"></a>
+<span class="sourceLineNo">586</span>  public static FanOutOneBlockAsyncDFSOutput createOutput(DistributedFileSystem dfs, Path f,<a name="line.586"></a>
+<span class="sourceLineNo">587</span>      boolean overwrite, boolean createParent, short replication, long blockSize,<a name="line.587"></a>
+<span class="sourceLineNo">588</span>      EventLoopGroup eventLoopGroup, Class&lt;? extends Channel&gt; channelClass) throws IOException {<a name="line.588"></a>
+<span class="sourceLineNo">589</span>    return new FileSystemLinkResolver&lt;FanOutOneBlockAsyncDFSOutput&gt;() {<a name="line.589"></a>
+<span class="sourceLineNo">590</span><a name="line.590"></a>
+<span class="sourceLineNo">591</span>      @Override<a name="line.591"></a>
+<span class="sourceLineNo">592</span>      public FanOutOneBlockAsyncDFSOutput doCall(Path p)<a name="line.592"></a>
+<span class="sourceLineNo">593</span>          throws IOException, UnresolvedLinkException {<a name="line.593"></a>
+<span class="sourceLineNo">594</span>        return createOutput(dfs, p.toUri().getPath(), overwrite, createParent, replication,<a name="line.594"></a>
+<span class="sourceLineNo">595</span>          blockSize, eventLoopGroup, channelClass);<a name="line.595"></a>
+<span class="sourceLineNo">596</span>      }<a name="line.596"></a>
+<span class="sourceLineNo">597</span><a name="line.597"></a>
+<span class="sourceLineNo">598</span>      @Override<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      public FanOutOneBlockAsyncDFSOutput next(FileSystem fs, Path p) throws IOException {<a name="line.599"></a>
+<span class="sourceLineNo">600</span>        throw new UnsupportedOperationException();<a name="line.600"></a>
+<span class="sourceLineNo">601</span>      }<a name="line.601"></a>
+<span class="sourceLineNo">602</span>    }.resolve(dfs, f);<a name="line.602"></a>
+<span class="sourceLineNo">603</span>  }<a name="line.603"></a>
+<span class="sourceLineNo">604</span><a name="line.604"></a>
+<span class="sourceLineNo">605</span>  public static boolean shouldRetryCreate(RemoteException e) {<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    // RetryStartFileException is introduced in HDFS 2.6+, so here we can only use the class name.<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    // For exceptions other than this, we just throw it out. This is same with<a name="line.607"></a>
+<span class="sourceLineNo">608</span>    // DFSOutputStream.newStreamForCreate.<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    return e.getClassName().endsWith("RetryStartFileException");<a name="line.609"></a>
+<span class="sourceLineNo">610</span>  }<a name="line.610"></a>
+<span class="sourceLineNo">611</span><a name="line.611"></a>
+<span class="sourceLineNo">612</span>  static void completeFile(DFSClient client, ClientProtocol namenode, String src, String clientName,<a name="line.612"></a>
+<span class="sourceLineNo">613</span>      ExtendedBlock block, long fileId) {<a name="line.613"></a>
+<span class="sourceLineNo">614</span>    for (int retry = 0;; retry++) {<a name="line.614"></a>
+<span class="sourceLineNo">615</span>      try {<a name="line.615"></a>
+<span class="sourceLineNo">616</span>        if (namenode.complete(src, clientName, block, fileId)) {<a name="line.616"></a>
+<span class="sourceLineNo">617</span>          endFileLease(client, fileId);<a name="line.617"></a>
+<span class="sourceLineNo">618</span>          return;<a name="line.618"></a>
+<span class="sourceLineNo">619</span>        } else {<a name="line.619"></a>
+<span class="sourceLineNo">620</span>          LOG.warn("complete file " + src + " not finished, retry = " + retry);<a name="line.620"></a>
+<span class="sourceLineNo">621</span>        }<a name="line.621"></a>
+<span class="sourceLineNo">622</span>      } catch (RemoteException e) {<a name="line.622"></a>
+<span class="sourceLineNo">623</span>        IOException ioe = e.unwrapRemoteException();<a name="line.623"></a>
+<span class="sourceLineNo">624</span>        if (ioe instanceof LeaseExpiredException) {<a name="line.624"></a>
+<span class="sourceLineNo">625</span>          LOG.warn("lease for file " + src + " is expired, give up", e);<a name="line.625"></a>
+<span class="sourceLineNo">626</span>          return;<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        } else {<a name="line.627"></a>
+<span class="sourceLineNo">628</span>          LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.628"></a>
+<span class="sourceLineNo">629</span>        }<a name="line.629"></a>
+<span class="sourceLineNo">630</span>      } catch (Exception e) {<a name="line.630"></a>
+<span class="sourceLineNo">631</span>        LOG.warn("complete file " + src + " failed, retry = " + retry, e);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      }<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      sleepIgnoreInterrupt(retry);<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
+<span class="sourceLineNo">635</span>  }<a name="line.635"></a>
+<span class="sourceLineNo">636</span><a name="line.636"></a>
+<span class="sourceLineNo">637</span>  static void sleepIgnoreInterrupt(int retry) {<a name="line.637"></a>
+<span class="sourceLineNo">638</span>    try {<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      Thread.sleep(ConnectionUtils.getPauseTime(100, retry));<a name="line.639"></a>
+<span class="sourceLineNo">640</span>    } catch (InterruptedException e) {<a name="line.640"></a>
+<span class="sourceLineNo">641</span>    }<a name="line.641"></a>
+<span class="sourceLineNo">642</span>  }<a name="line.642"></a>
+<span class="sourceLineNo">643</span>}<a name="line.643"></a>
 
 
 
diff --git a/downloads.html b/downloads.html
index 3b7efff..6ca1f7c 100644
--- a/downloads.html
+++ b/downloads.html
@@ -465,7 +465,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/export_control.html b/export_control.html
index 1492f10..b9f15a4 100644
--- a/export_control.html
+++ b/export_control.html
@@ -197,7 +197,7 @@ for more details.</p>
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/index.html b/index.html
index 3a06e8a..0cc9c0c 100644
--- a/index.html
+++ b/index.html
@@ -275,7 +275,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/issue-tracking.html b/issue-tracking.html
index 960ab52..7c5bc6a 100644
--- a/issue-tracking.html
+++ b/issue-tracking.html
@@ -169,7 +169,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/mail-lists.html b/mail-lists.html
index 56e4da2..17bc622 100644
--- a/mail-lists.html
+++ b/mail-lists.html
@@ -229,7 +229,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/metrics.html b/metrics.html
index 61158ec..d7664ad 100644
--- a/metrics.html
+++ b/metrics.html
@@ -325,7 +325,7 @@ export HBASE_REGIONSERVER_OPTS=&quot;$HBASE_JMX_OPTS -Dcom.sun.management.jmxrem
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/old_news.html b/old_news.html
index a75d7c2..9b71ff8 100644
--- a/old_news.html
+++ b/old_news.html
@@ -316,7 +316,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/plugin-management.html b/plugin-management.html
index ae68086..283e734 100644
--- a/plugin-management.html
+++ b/plugin-management.html
@@ -207,8 +207,8 @@
 <td>3.0.1</td></tr>
 <tr class="a">
 <td>org.apache.maven.plugins</td>
-<td><a class="externalLink" href="http://maven.apache.org/enforcer/maven-enforcer-plugin">maven-enforcer-plugin</a></td>
-<td>1.4.1</td></tr>
+<td><a class="externalLink" href="https://maven.apache.org/enforcer/maven-enforcer-plugin/">maven-enforcer-plugin</a></td>
+<td>3.0.0-M2</td></tr>
 <tr class="b">
 <td>org.apache.maven.plugins</td>
 <td><a class="externalLink" href="https://maven.apache.org/surefire/maven-failsafe-plugin/">maven-failsafe-plugin</a></td>
@@ -321,7 +321,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/plugins.html b/plugins.html
index 68fb065..83f1270 100644
--- a/plugins.html
+++ b/plugins.html
@@ -248,7 +248,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/poweredbyhbase.html b/poweredbyhbase.html
index 8fb6484..d5b92b6 100644
--- a/poweredbyhbase.html
+++ b/poweredbyhbase.html
@@ -650,7 +650,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/project-info.html b/project-info.html
index e1b2ac3..5e7962f 100644
--- a/project-info.html
+++ b/project-info.html
@@ -210,7 +210,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/project-reports.html b/project-reports.html
index a52c136..bf17b81 100644
--- a/project-reports.html
+++ b/project-reports.html
@@ -186,7 +186,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/project-summary.html b/project-summary.html
index bd56bf5..f1772fa 100644
--- a/project-summary.html
+++ b/project-summary.html
@@ -212,7 +212,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/pseudo-distributed.html b/pseudo-distributed.html
index ce38ba5..3abd2f3 100644
--- a/pseudo-distributed.html
+++ b/pseudo-distributed.html
@@ -174,7 +174,7 @@ Running Apache HBase (TM) in pseudo-distributed mode
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/replication.html b/replication.html
index 0b269fc..6a406d1 100644
--- a/replication.html
+++ b/replication.html
@@ -169,7 +169,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/resources.html b/resources.html
index 536643b..33edcee 100644
--- a/resources.html
+++ b/resources.html
@@ -197,7 +197,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/source-repository.html b/source-repository.html
index 7650f07..861b1e2 100644
--- a/source-repository.html
+++ b/source-repository.html
@@ -180,7 +180,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/sponsors.html b/sponsors.html
index e93a754..d98955c 100644
--- a/sponsors.html
+++ b/sponsors.html
@@ -199,7 +199,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/supportingprojects.html b/supportingprojects.html
index 0cfb4cc..dac464e 100644
--- a/supportingprojects.html
+++ b/supportingprojects.html
@@ -390,7 +390,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/team-list.html b/team-list.html
index 63cc889..508661b 100644
--- a/team-list.html
+++ b/team-list.html
@@ -701,7 +701,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-15</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-03-17</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html
index 39e2546..a42b76c 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.RemoteShell.html
@@ -244,14 +244,14 @@ extends org.apache.hadoop.util.Shell.ShellCommandExecutor</pre>
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.util.Shell.ShellCommandExecutor</h3>
-<code>close, execute, getOutput, parseExecResult, toString</code></li>
+<code>close, execute, getOutput, getTimeoutInterval, parseExecResult, toString</code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.util.Shell">
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.util.Shell</h3>
-<code>appendScriptExtension, appendScriptExtension, checkIsBashSupported, checkWindowsCommandLineLength, execCommand, execCommand, execCommand, getCheckProcessIsAliveCommand, getEnvironment, getEnvironmentVariableRegex, getExitCode, getGetPermissionCommand, getGroupsCommand, getGroupsForUserCommand, getGroupsIDForUserCommand, getHadoopHome, getProcess, getQualifiedBin, getQualifiedBinPath, getReadlinkCommand, getRunScriptCommand, getSetOwnerCommand, getSetPermissionCommand, getSetPermiss [...]
+<code>appendScriptExtension, appendScriptExtension, checkIsBashSupported, checkWindowsCommandLineLength, destroyAllShellProcesses, execCommand, execCommand, execCommand, getAllShells, getCheckProcessIsAliveCommand, getEnvironment, getEnvironmentVariableRegex, getExitCode, getGetPermissionCommand, getGroupsCommand, getGroupsForUserCommand, getGroupsIDForUserCommand, getHadoopHome, getMemlockLimit, getProcess, getQualifiedBin, getQualifiedBinPath, getReadlinkCommand, getRunScriptCommand, g [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
diff --git a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.RemoteSudoShell.html b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.RemoteSudoShell.html
index 8f8d4f3..f711b20 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.RemoteSudoShell.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/HBaseClusterManager.RemoteSudoShell.html
@@ -225,14 +225,14 @@ extends org.apache.hadoop.util.Shell.ShellCommandExecutor</pre>
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.util.Shell.ShellCommandExecutor</h3>
-<code>close, execute, getOutput, parseExecResult, toString</code></li>
+<code>close, execute, getOutput, getTimeoutInterval, parseExecResult, toString</code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.util.Shell">
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.util.Shell</h3>
-<code>appendScriptExtension, appendScriptExtension, checkIsBashSupported, checkWindowsCommandLineLength, execCommand, execCommand, execCommand, getCheckProcessIsAliveCommand, getEnvironment, getEnvironmentVariableRegex, getExitCode, getGetPermissionCommand, getGroupsCommand, getGroupsForUserCommand, getGroupsIDForUserCommand, getHadoopHome, getProcess, getQualifiedBin, getQualifiedBinPath, getReadlinkCommand, getRunScriptCommand, getSetOwnerCommand, getSetPermissionCommand, getSetPermiss [...]
+<code>appendScriptExtension, appendScriptExtension, checkIsBashSupported, checkWindowsCommandLineLength, destroyAllShellProcesses, execCommand, execCommand, execCommand, getAllShells, getCheckProcessIsAliveCommand, getEnvironment, getEnvironmentVariableRegex, getExitCode, getGetPermissionCommand, getGroupsCommand, getGroupsForUserCommand, getGroupsIDForUserCommand, getHadoopHome, getMemlockLimit, getProcess, getQualifiedBin, getQualifiedBinPath, getReadlinkCommand, getRunScriptCommand, g [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
diff --git a/testdevapidocs/org/apache/hadoop/hbase/chaos/actions/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/chaos/actions/package-tree.html
index 9b7dde2..6423256 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/chaos/actions/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/chaos/actions/package-tree.html
@@ -159,8 +159,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.chaos.actions.<a href="../../../../../../org/apache/hadoop/hbase/chaos/actions/RollingBatchSuspendResumeRsAction.SuspendOrResume.html" title="enum in org.apache.hadoop.hbase.chaos.actions"><span class="typeNameLink">RollingBatchSuspendResumeRsAction.SuspendOrResume</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.chaos.actions.<a href="../../../../../../org/apache/hadoop/hbase/chaos/actions/RollingBatchRestartRsAction.KillOrStart.html" title="enum in org.apache.hadoop.hbase.chaos.actions"><span class="typeNameLink">RollingBatchRestartRsAction.KillOrStart</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.chaos.actions.<a href="../../../../../../org/apache/hadoop/hbase/chaos/actions/RollingBatchSuspendResumeRsAction.SuspendOrResume.html" title="enum in org.apache.hadoop.hbase.chaos.actions"><span class="typeNameLink">RollingBatchSuspendResumeRsAction.SuspendOrResume</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/http/TestHttpServer.MyGroupsProvider.html b/testdevapidocs/org/apache/hadoop/hbase/http/TestHttpServer.MyGroupsProvider.html
index ad3e4b6..a83bc8c 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/http/TestHttpServer.MyGroupsProvider.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/http/TestHttpServer.MyGroupsProvider.html
@@ -100,6 +100,9 @@ var activeTableTab = "activeTableTab";
 <li><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">java.lang.Object</a></li>
 <li>
 <ul class="inheritance">
+<li>org.apache.hadoop.conf.Configured</li>
+<li>
+<ul class="inheritance">
 <li>org.apache.hadoop.security.ShellBasedUnixGroupsMapping</li>
 <li>
 <ul class="inheritance">
@@ -109,12 +112,14 @@ var activeTableTab = "activeTableTab";
 </ul>
 </li>
 </ul>
+</li>
+</ul>
 <div class="description">
 <ul class="blockList">
 <li class="blockList">
 <dl>
 <dt>All Implemented Interfaces:</dt>
-<dd>org.apache.hadoop.security.GroupMappingServiceProvider</dd>
+<dd>org.apache.hadoop.conf.Configurable, org.apache.hadoop.security.GroupMappingServiceProvider</dd>
 </dl>
 <dl>
 <dt>Enclosing class:</dt>
@@ -149,6 +154,13 @@ extends org.apache.hadoop.security.ShellBasedUnixGroupsMapping</pre>
 </tr>
 </table>
 <ul class="blockList">
+<li class="blockList"><a name="fields.inherited.from.class.org.apache.hadoop.security.ShellBasedUnixGroupsMapping">
+<!--   -->
+</a>
+<h3>Fields inherited from class&nbsp;org.apache.hadoop.security.ShellBasedUnixGroupsMapping</h3>
+<code>LOG</code></li>
+</ul>
+<ul class="blockList">
 <li class="blockList"><a name="fields.inherited.from.class.org.apache.hadoop.security.GroupMappingServiceProvider">
 <!--   -->
 </a>
@@ -200,7 +212,14 @@ extends org.apache.hadoop.security.ShellBasedUnixGroupsMapping</pre>
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.security.ShellBasedUnixGroupsMapping</h3>
-<code>cacheGroupsAdd, cacheGroupsRefresh, createGroupExecutor, createGroupIDExecutor</code></li>
+<code>cacheGroupsAdd, cacheGroupsRefresh, createGroupExecutor, createGroupIDExecutor, getGroupsForUserCommand, getGroupsIDForUserCommand, resolveFullGroupNames, setConf</code></li>
+</ul>
+<ul class="blockList">
+<li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.conf.Configured">
+<!--   -->
+</a>
+<h3>Methods inherited from class&nbsp;org.apache.hadoop.conf.Configured</h3>
+<code>getConf</code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
diff --git a/testdevapidocs/org/apache/hadoop/hbase/http/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/http/package-tree.html
index aae0f88..d4aa0cc 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/http/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/http/package-tree.html
@@ -97,6 +97,15 @@
 </li>
 </ul>
 </li>
+<li type="circle">org.apache.hadoop.conf.Configured (implements org.apache.hadoop.conf.Configurable)
+<ul>
+<li type="circle">org.apache.hadoop.security.ShellBasedUnixGroupsMapping (implements org.apache.hadoop.security.GroupMappingServiceProvider)
+<ul>
+<li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/TestHttpServer.MyGroupsProvider.html" title="class in org.apache.hadoop.hbase.http"><span class="typeNameLink">TestHttpServer.MyGroupsProvider</span></a></li>
+</ul>
+</li>
+</ul>
+</li>
 <li type="circle">org.apache.hadoop.hbase.http.FilterInitializer
 <ul>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/TestGlobalFilter.RecordingFilter.Initializer.html" title="class in org.apache.hadoop.hbase.http"><span class="typeNameLink">TestGlobalFilter.RecordingFilter.Initializer</span></a></li>
@@ -118,11 +127,6 @@
 </li>
 </ul>
 </li>
-<li type="circle">org.apache.hadoop.security.ShellBasedUnixGroupsMapping (implements org.apache.hadoop.security.GroupMappingServiceProvider)
-<ul>
-<li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/TestHttpServer.MyGroupsProvider.html" title="class in org.apache.hadoop.hbase.http"><span class="typeNameLink">TestHttpServer.MyGroupsProvider</span></a></li>
-</ul>
-</li>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/TestGlobalFilter.RecordingFilter.html" title="class in org.apache.hadoop.hbase.http"><span class="typeNameLink">TestGlobalFilter.RecordingFilter</span></a> (implements javax.servlet.Filter)</li>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/TestHtmlQuoting.html" title="class in org.apache.hadoop.hbase.http"><span class="typeNameLink">TestHtmlQuoting</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/TestHttpRequestLog.html" title="class in org.apache.hadoop.hbase.http"><span class="typeNameLink">TestHttpRequestLog</span></a></li>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/io/TestFileLink.MyDistributedFileSystem.html b/testdevapidocs/org/apache/hadoop/hbase/io/TestFileLink.MyDistributedFileSystem.html
index 8bdf07c..7f2f952 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/io/TestFileLink.MyDistributedFileSystem.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/io/TestFileLink.MyDistributedFileSystem.html
@@ -151,14 +151,14 @@ extends org.apache.hadoop.hdfs.DistributedFileSystem</pre>
 <!--   -->
 </a>
 <h3>Nested classes/interfaces inherited from class&nbsp;org.apache.hadoop.hdfs.DistributedFileSystem</h3>
-<code>org.apache.hadoop.hdfs.DistributedFileSystem.DiskStatus</code></li>
+<code>org.apache.hadoop.hdfs.DistributedFileSystem.DiskStatus, org.apache.hadoop.hdfs.DistributedFileSystem.HdfsDataOutputStreamBuilder</code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="nested.classes.inherited.from.class.org.apache.hadoop.fs.FileSystem">
 <!--   -->
 </a>
 <h3>Nested classes/interfaces inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>org.apache.hadoop.fs.FileSystem.Statistics</code></li>
+<code>org.apache.hadoop.fs.FileSystem.DirectoryEntries, org.apache.hadoop.fs.FileSystem.Statistics</code></li>
 </ul>
 </li>
 </ul>
@@ -173,7 +173,7 @@ extends org.apache.hadoop.hdfs.DistributedFileSystem</pre>
 <!--   -->
 </a>
 <h3>Fields inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, LOG, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX</code></li>
+<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, LOG, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX, USER_HOME_PREFIX</code></li>
 </ul>
 </li>
 </ul>
@@ -221,14 +221,14 @@ extends org.apache.hadoop.hdfs.DistributedFileSystem</pre>
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.hdfs.DistributedFileSystem</h3>
-<code>access, addCacheDirective, addCacheDirective, addCachePool, addDelegationTokens, allowSnapshot, append, append, append, canonicalizeUri, close, concat, create, create, create, createEncryptionZone, createNonRecursive, createSnapshot, createSymlink, delete, deleteSnapshot, disallowSnapshot, finalizeUpgrade, fixRelativePart, getAclStatus, getAllStoragePolicies, getBytesWithFutureGenerationStamps, getCanonicalServiceName, getClient, getContentSummary, getCorruptBlocksCount, getDataNod [...]
+<code>access, addCacheDirective, addCacheDirective, addCachePool, addDelegationTokens, allowSnapshot, append, append, append, appendFile, canonicalizeUri, close, concat, create, create, create, createEncryptionZone, createFile, createNonRecursive, createSnapshot, createSymlink, delete, deleteSnapshot, disallowSnapshot, finalizeUpgrade, fixRelativePart, getAclStatus, getAllStoragePolicies, getBytesWithFutureGenerationStamps, getCanonicalServiceName, getClient, getContentSummary, getCorrup [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.fs.FileSystem">
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>append, append, areSymlinksEnabled, cancelDeleteOnExit, checkPath, clearStatistics, closeAll, closeAllForUGI, completeLocalOutput, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, create, createNewFile, createNonRecursive, createNonRecursive, createSnapshot, delete, deleteOnExit, enableSymlinks, exists, get, get, get, getAllSt [...]
+<code>append, append, areSymlinksEnabled, cancelDeleteOnExit, checkPath, clearStatistics, closeAll, closeAllForUGI, completeLocalOutput, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, create, createNewFile, createNonRecursive, createNonRecursive, createSnapshot, delete, deleteOnExit, enableSymlinks, exists, get, get, get, getAllSt [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.conf.Configured">
diff --git a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index d4d7436..f32920e 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -165,8 +165,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.CacheOnWriteType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">TestCacheOnWrite.CacheOnWriteType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/TagUsage.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">TagUsage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.CacheOnWriteType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">TestCacheOnWrite.CacheOnWriteType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
index fc9aa23..7a41d36 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -620,15 +620,15 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterManager.ServiceType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.Stat.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">IntegrationTestRegionReplicaPerf.Stat</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ResourceChecker.Phase.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ResourceChecker.Phase</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HBaseClusterManager.CommandProvider.Operation.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HBaseClusterManager.CommandProvider.Operation</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.ACTION.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">IntegrationTestDDLMasterFailover.ACTION</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/RESTApiClusterManager.Service.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">RESTApiClusterManager.Service</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/PerformanceEvaluation.Counter.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">PerformanceEvaluation.Counter</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ScanPerformanceEvaluation.ScanCounter.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ScanPerformanceEvaluation.ScanCounter</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/RESTApiClusterManager.RoleCommand.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">RESTApiClusterManager.RoleCommand</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterManager.ServiceType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterManager.ServiceType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ResourceChecker.Phase.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ResourceChecker.Phase</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/PerformanceEvaluation.Counter.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">PerformanceEvaluation.Counter</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/RESTApiClusterManager.Service.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">RESTApiClusterManager.Service</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.ACTION.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">IntegrationTestDDLMasterFailover.ACTION</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.Stat.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">IntegrationTestRegionReplicaPerf.Stat</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/procedure/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/procedure/package-tree.html
index c6bce5f..8ff917c 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/procedure/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/procedure/package-tree.html
@@ -81,14 +81,14 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Object</span></a>
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure2.Procedure&lt;TEnvironment&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;)
+<li type="circle">org.apache.hadoop.hbase.procedure.Procedure (implements java.util.concurrent.<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true" title="class or interface in java.util.concurrent">Callable</a>&lt;V&gt;, org.apache.hadoop.hbase.errorhandling.ForeignExceptionListener)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure.<a href="../../../../../org/apache/hadoop/hbase/procedure/TestProcedureDescriber.TestProcedure.html" title="class in org.apache.hadoop.hbase.procedure"><span class="typeNameLink">TestProcedureDescriber.TestProcedure</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure.<a href="../../../../../org/apache/hadoop/hbase/procedure/TestProcedure.LatchedProcedure.html" title="class in org.apache.hadoop.hbase.procedure"><span class="typeNameLink">TestProcedure.LatchedProcedure</span></a></li>
 </ul>
 </li>
-<li type="circle">org.apache.hadoop.hbase.procedure.Procedure (implements java.util.concurrent.<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Callable.html?is-external=true" title="class or interface in java.util.concurrent">Callable</a>&lt;V&gt;, org.apache.hadoop.hbase.errorhandling.ForeignExceptionListener)
+<li type="circle">org.apache.hadoop.hbase.procedure2.Procedure&lt;TEnvironment&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure.<a href="../../../../../org/apache/hadoop/hbase/procedure/TestProcedure.LatchedProcedure.html" title="class in org.apache.hadoop.hbase.procedure"><span class="typeNameLink">TestProcedure.LatchedProcedure</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure.<a href="../../../../../org/apache/hadoop/hbase/procedure/TestProcedureDescriber.TestProcedure.html" title="class in org.apache.hadoop.hbase.procedure"><span class="typeNameLink">TestProcedureDescriber.TestProcedure</span></a></li>
 </ul>
 </li>
 <li type="circle">org.apache.hadoop.hbase.procedure.ProcedureManager
diff --git a/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index 11555ca..21a35b5 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -241,8 +241,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.TestStateMachineProcedure.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">TestProcedureRecovery.TestStateMachineProcedure.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/TestStateMachineProcedure.TestSMProcedureState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">TestStateMachineProcedure.TestSMProcedureState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.TestStateMachineProcedure.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">TestProcedureRecovery.TestStateMachineProcedure.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/TestProcedureBypass.StuckStateMachineState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">TestProcedureBypass.StuckStateMachineState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/TestYieldProcedures.TestStateMachineProcedure.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">TestYieldProcedures.TestStateMachineProcedure.State</span></a></li>
 </ul>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.FaultyFileSystem.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.FaultyFileSystem.html
index 29e22c7..a40a5c4 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.FaultyFileSystem.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.FaultyFileSystem.html
@@ -151,7 +151,7 @@ extends org.apache.hadoop.fs.FilterFileSystem</pre>
 <!--   -->
 </a>
 <h3>Nested classes/interfaces inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>org.apache.hadoop.fs.FileSystem.Statistics</code></li>
+<code>org.apache.hadoop.fs.FileSystem.DirectoryEntries, org.apache.hadoop.fs.FileSystem.Statistics</code></li>
 </ul>
 </li>
 </ul>
@@ -184,7 +184,7 @@ extends org.apache.hadoop.fs.FilterFileSystem</pre>
 <!--   -->
 </a>
 <h3>Fields inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, LOG, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX</code></li>
+<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, LOG, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX, USER_HOME_PREFIX</code></li>
 </ul>
 </li>
 </ul>
@@ -234,14 +234,14 @@ extends org.apache.hadoop.fs.FilterFileSystem</pre>
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.fs.FilterFileSystem</h3>
-<code>access, append, canonicalizeUri, checkPath, close, completeLocalOutput, concat, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, create, create, createNonRecursive, createSnapshot, createSymlink, delete, deleteSnapshot, getAclStatus, getAllStoragePolicies, getCanonicalUri, getChildFileSystems, getConf, getDefaultBlockSize, getDefaultBlockSize, getDefaultReplication, getDefaultReplication, getFileBlockLocations, getFileChecksum, getFileChecksum, getFileLinkS [...]
+<code>access, append, appendFile, canonicalizeUri, checkPath, close, completeLocalOutput, concat, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, create, create, createFile, createNonRecursive, createSnapshot, createSymlink, delete, deleteSnapshot, getAclStatus, getAllStoragePolicies, getCanonicalUri, getChildFileSystems, getConf, getDefaultBlockSize, getDefaultBlockSize, getDefaultReplication, getDefaultReplication, getFileBlockLocations, getFileChecksum, getFi [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.fs.FileSystem">
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>addDelegationTokens, append, append, areSymlinksEnabled, cancelDeleteOnExit, clearStatistics, closeAll, closeAllForUGI, copyFromLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, create, createNewFile, createNonRecursive, createNonRecursive, createSnapshot, delete, deleteOnExit, enableSymlinks, exists, fixRelativePart, get, get, get, getAllStatistics, getBlockSize, getCanonicalServiceName, getContentSummary,  [...]
+<code>addDelegationTokens, append, append, areSymlinksEnabled, cancelDeleteOnExit, clearStatistics, closeAll, closeAllForUGI, copyFromLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, create, createNewFile, createNonRecursive, createNonRecursive, createSnapshot, delete, deleteOnExit, enableSymlinks, exists, fixRelativePart, get, get, get, getAllStatistics, getBlockSize, getCanonicalServiceName, getContentSummary,  [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.conf.Configured">
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.FaultyInputStream.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.FaultyInputStream.html
index e0a0b79..14ad2f5 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.FaultyInputStream.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.FaultyInputStream.html
@@ -129,7 +129,7 @@ var activeTableTab = "activeTableTab";
 <li class="blockList">
 <dl>
 <dt>All Implemented Interfaces:</dt>
-<dd><a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/AutoCloseable.html?is-external=true" title="class or interface in java.lang">AutoCloseable</a>, org.apache.hadoop.fs.ByteBufferReadable,  [...]
+<dd><a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/AutoCloseable.html?is-external=true" title="class or interface in java.lang">AutoCloseable</a>, org.apache.hadoop.fs.ByteBufferPositioned [...]
 </dl>
 <dl>
 <dt>Enclosing class:</dt>
@@ -145,6 +145,21 @@ extends org.apache.hadoop.fs.FSDataInputStream</pre>
 <div class="summary">
 <ul class="blockList">
 <li class="blockList">
+<!-- ======== NESTED CLASS SUMMARY ======== -->
+<ul class="blockList">
+<li class="blockList"><a name="nested.class.summary">
+<!--   -->
+</a>
+<h3>Nested Class Summary</h3>
+<ul class="blockList">
+<li class="blockList"><a name="nested.classes.inherited.from.class.org.apache.hadoop.fs.StreamCapabilities">
+<!--   -->
+</a>
+<h3>Nested classes/interfaces inherited from interface&nbsp;org.apache.hadoop.fs.StreamCapabilities</h3>
+<code>org.apache.hadoop.fs.StreamCapabilities.StreamCapability</code></li>
+</ul>
+</li>
+</ul>
 <!-- =========== FIELD SUMMARY =========== -->
 <ul class="blockList">
 <li class="blockList"><a name="field.summary">
@@ -169,6 +184,13 @@ extends org.apache.hadoop.fs.FSDataInputStream</pre>
 <h3>Fields inherited from class&nbsp;java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/FilterInputStream.html?is-external=true" title="class or interface in java.io">FilterInputStream</a></h3>
 <code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/FilterInputStream.html?is-external=true#in" title="class or interface in java.io">in</a></code></li>
 </ul>
+<ul class="blockList">
+<li class="blockList"><a name="fields.inherited.from.class.org.apache.hadoop.fs.StreamCapabilities">
+<!--   -->
+</a>
+<h3>Fields inherited from interface&nbsp;org.apache.hadoop.fs.StreamCapabilities</h3>
+<code>DROPBEHIND, HFLUSH, HSYNC, PREADBYTEBUFFER, READAHEAD, READBYTEBUFFER, UNBUFFER</code></li>
+</ul>
 </li>
 </ul>
 <!-- ======== CONSTRUCTOR SUMMARY ======== -->
@@ -221,7 +243,7 @@ extends org.apache.hadoop.fs.FSDataInputStream</pre>
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.fs.FSDataInputStream</h3>
-<code>getFileDescriptor, getPos, getWrappedStream, read, read, read, readFully, readFully, releaseBuffer, seek, seekToNewSource, setDropBehind, setReadahead, toString, unbuffer</code></li>
+<code>getFileDescriptor, getPos, getWrappedStream, hasCapability, read, read, read, read, readFully, readFully, releaseBuffer, seek, seekToNewSource, setDropBehind, setReadahead, toString, unbuffer</code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.java.io.DataInputStream">
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.MockFileSystem.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.MockFileSystem.html
index 37ef895..2029973 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.MockFileSystem.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.MockFileSystem.html
@@ -152,7 +152,7 @@ extends org.apache.hadoop.fs.FileSystem</pre>
 <!--   -->
 </a>
 <h3>Nested classes/interfaces inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>org.apache.hadoop.fs.FileSystem.Statistics</code></li>
+<code>org.apache.hadoop.fs.FileSystem.DirectoryEntries, org.apache.hadoop.fs.FileSystem.Statistics</code></li>
 </ul>
 </li>
 </ul>
@@ -182,7 +182,7 @@ extends org.apache.hadoop.fs.FileSystem</pre>
 <!--   -->
 </a>
 <h3>Fields inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, LOG, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX</code></li>
+<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, LOG, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX, USER_HOME_PREFIX</code></li>
 </ul>
 </li>
 </ul>
@@ -285,7 +285,7 @@ extends org.apache.hadoop.fs.FileSystem</pre>
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>access, addDelegationTokens, append, append, areSymlinksEnabled, cancelDeleteOnExit, canonicalizeUri, checkPath, clearStatistics, close, closeAll, closeAllForUGI, completeLocalOutput, concat, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, create, create, createNewFile, createNonRecursive, createNonRecursive, createNonRecursi [...]
+<code>access, addDelegationTokens, append, append, appendFile, areSymlinksEnabled, cancelDeleteOnExit, canonicalizeUri, checkPath, clearStatistics, close, closeAll, closeAllForUGI, completeLocalOutput, concat, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, create, create, createFile, createNewFile, createNonRecursive, createNonRec [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.conf.Configured">
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.MockFileSystemForCreate.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.MockFileSystemForCreate.html
index c86be59..d6246a2 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.MockFileSystemForCreate.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.MockFileSystemForCreate.html
@@ -151,7 +151,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/TestHRegion
 <!--   -->
 </a>
 <h3>Nested classes/interfaces inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>org.apache.hadoop.fs.FileSystem.Statistics</code></li>
+<code>org.apache.hadoop.fs.FileSystem.DirectoryEntries, org.apache.hadoop.fs.FileSystem.Statistics</code></li>
 </ul>
 </li>
 </ul>
@@ -173,7 +173,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/TestHRegion
 <!--   -->
 </a>
 <h3>Fields inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, LOG, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX</code></li>
+<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, LOG, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX, USER_HOME_PREFIX</code></li>
 </ul>
 </li>
 </ul>
@@ -223,7 +223,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/TestHRegion
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>access, addDelegationTokens, append, append, areSymlinksEnabled, cancelDeleteOnExit, canonicalizeUri, checkPath, clearStatistics, close, closeAll, closeAllForUGI, completeLocalOutput, concat, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, create, create, createNewFile, createNonRecursive, createNonRecursive, createNonRecursi [...]
+<code>access, addDelegationTokens, append, append, appendFile, areSymlinksEnabled, cancelDeleteOnExit, canonicalizeUri, checkPath, clearStatistics, close, closeAll, closeAllForUGI, completeLocalOutput, concat, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, create, create, createFile, createNewFile, createNonRecursive, createNonRec [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.conf.Configured">
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyFileSystem.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyFileSystem.html
index 5e4ab1f..d86abd0 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyFileSystem.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyFileSystem.html
@@ -153,7 +153,7 @@ extends org.apache.hadoop.fs.FilterFileSystem</pre>
 <!--   -->
 </a>
 <h3>Nested classes/interfaces inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>org.apache.hadoop.fs.FileSystem.Statistics</code></li>
+<code>org.apache.hadoop.fs.FileSystem.DirectoryEntries, org.apache.hadoop.fs.FileSystem.Statistics</code></li>
 </ul>
 </li>
 </ul>
@@ -194,7 +194,7 @@ extends org.apache.hadoop.fs.FilterFileSystem</pre>
 <!--   -->
 </a>
 <h3>Fields inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, LOG, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX</code></li>
+<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, LOG, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX, USER_HOME_PREFIX</code></li>
 </ul>
 </li>
 </ul>
@@ -256,14 +256,14 @@ extends org.apache.hadoop.fs.FilterFileSystem</pre>
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.fs.FilterFileSystem</h3>
-<code>access, append, canonicalizeUri, checkPath, close, completeLocalOutput, concat, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, create, createNonRecursive, createSnapshot, createSymlink, delete, deleteSnapshot, getAclStatus, getAllStoragePolicies, getCanonicalUri, getChildFileSystems, getConf, getDefaultBlockSize, getDefaultBlockSize, getDefaultReplication, getDefaultReplication, getFileBlockLocations, getFileChecksum, getFileChecksum, getFileLinkStatus, g [...]
+<code>access, append, appendFile, canonicalizeUri, checkPath, close, completeLocalOutput, concat, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, create, createFile, createNonRecursive, createSnapshot, createSymlink, delete, deleteSnapshot, getAclStatus, getAllStoragePolicies, getCanonicalUri, getChildFileSystems, getConf, getDefaultBlockSize, getDefaultBlockSize, getDefaultReplication, getDefaultReplication, getFileBlockLocations, getFileChecksum, getFileChecks [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.fs.FileSystem">
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>addDelegationTokens, append, append, areSymlinksEnabled, cancelDeleteOnExit, clearStatistics, closeAll, closeAllForUGI, copyFromLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, createNewFile, createNonRecursive, createSnapshot, delete, deleteOnExit, enableSymlinks, exists, fixRelativePart, get, get, get, getAllStatistics, getBlockSize, getCanonicalServiceName, getContentSummary, getDefaultPort, getDefaultUr [...]
+<code>addDelegationTokens, append, append, areSymlinksEnabled, cancelDeleteOnExit, clearStatistics, closeAll, closeAllForUGI, copyFromLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, createNewFile, createNonRecursive, createSnapshot, delete, deleteOnExit, enableSymlinks, exists, fixRelativePart, get, get, get, getAllStatistics, getBlockSize, getCanonicalServiceName, getContentSummary, getDefaultPort, getDefaultUr [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.conf.Configured">
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyOutputStream.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyOutputStream.html
index fdce80c..425a489 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyOutputStream.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStore.FaultyOutputStream.html
@@ -129,7 +129,7 @@ var activeTableTab = "activeTableTab";
 <li class="blockList">
 <dl>
 <dt>All Implemented Interfaces:</dt>
-<dd><a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutput.html?is-external=true" title="class or interface in java.io">DataOutput</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Flushable.html?is-external=true" title="class or interface in java.io">Flushable</a>, <a href="https://docs.oracle.com/javase/8/docs/api/ [...]
+<dd><a href="https://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutput.html?is-external=true" title="class or interface in java.io">DataOutput</a>, <a href="https://docs.oracle.com/javase/8/docs/api/java/io/Flushable.html?is-external=true" title="class or interface in java.io">Flushable</a>, <a href="https://docs.oracle.com/javase/8/docs/api/ [...]
 </dl>
 <dl>
 <dt>Enclosing class:</dt>
@@ -145,6 +145,21 @@ extends org.apache.hadoop.fs.FSDataOutputStream</pre>
 <div class="summary">
 <ul class="blockList">
 <li class="blockList">
+<!-- ======== NESTED CLASS SUMMARY ======== -->
+<ul class="blockList">
+<li class="blockList"><a name="nested.class.summary">
+<!--   -->
+</a>
+<h3>Nested Class Summary</h3>
+<ul class="blockList">
+<li class="blockList"><a name="nested.classes.inherited.from.class.org.apache.hadoop.fs.StreamCapabilities">
+<!--   -->
+</a>
+<h3>Nested classes/interfaces inherited from interface&nbsp;org.apache.hadoop.fs.StreamCapabilities</h3>
+<code>org.apache.hadoop.fs.StreamCapabilities.StreamCapability</code></li>
+</ul>
+</li>
+</ul>
 <!-- =========== FIELD SUMMARY =========== -->
 <ul class="blockList">
 <li class="blockList"><a name="field.summary">
@@ -180,6 +195,13 @@ extends org.apache.hadoop.fs.FSDataOutputStream</pre>
 <h3>Fields inherited from class&nbsp;java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/FilterOutputStream.html?is-external=true" title="class or interface in java.io">FilterOutputStream</a></h3>
 <code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/FilterOutputStream.html?is-external=true#out" title="class or interface in java.io">out</a></code></li>
 </ul>
+<ul class="blockList">
+<li class="blockList"><a name="fields.inherited.from.class.org.apache.hadoop.fs.StreamCapabilities">
+<!--   -->
+</a>
+<h3>Fields inherited from interface&nbsp;org.apache.hadoop.fs.StreamCapabilities</h3>
+<code>DROPBEHIND, HFLUSH, HSYNC, PREADBYTEBUFFER, READAHEAD, READBYTEBUFFER, UNBUFFER</code></li>
+</ul>
 </li>
 </ul>
 <!-- ======== CONSTRUCTOR SUMMARY ======== -->
@@ -229,7 +251,7 @@ extends org.apache.hadoop.fs.FSDataOutputStream</pre>
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.fs.FSDataOutputStream</h3>
-<code>close, getPos, getWrappedStream, hflush, hsync, setDropBehind, sync</code></li>
+<code>close, getPos, getWrappedStream, hasCapability, hflush, hsync, setDropBehind, sync</code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.java.io.DataOutputStream">
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index b00d54d..fbf93e9 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -272,7 +272,7 @@
 <ul>
 <li type="circle">java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io"><span class="typeNameLink">DataInputStream</span></a> (implements java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>)
 <ul>
-<li type="circle">org.apache.hadoop.fs.FSDataInputStream (implements org.apache.hadoop.fs.ByteBufferReadable, org.apache.hadoop.fs.CanSetDropBehind, org.apache.hadoop.fs.CanSetReadahead, org.apache.hadoop.fs.CanUnbuffer, org.apache.hadoop.fs.HasEnhancedByteBufferAccess, org.apache.hadoop.fs.HasFileDescriptor, org.apache.hadoop.fs.PositionedReadable, org.apache.hadoop.fs.Seekable)
+<li type="circle">org.apache.hadoop.fs.FSDataInputStream (implements org.apache.hadoop.fs.ByteBufferPositionedReadable, org.apache.hadoop.fs.ByteBufferReadable, org.apache.hadoop.fs.CanSetDropBehind, org.apache.hadoop.fs.CanSetReadahead, org.apache.hadoop.fs.CanUnbuffer, org.apache.hadoop.fs.HasEnhancedByteBufferAccess, org.apache.hadoop.fs.HasFileDescriptor, org.apache.hadoop.fs.PositionedReadable, org.apache.hadoop.fs.Seekable, org.apache.hadoop.fs.StreamCapabilities)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.FaultyInputStream.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestFSErrorsExposed.FaultyInputStream</span></a></li>
 </ul>
@@ -340,7 +340,7 @@
 <ul>
 <li type="circle">java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io"><span class="typeNameLink">DataOutputStream</span></a> (implements java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutput.html?is-external=true" title="class or interface in java.io">DataOutput</a>)
 <ul>
-<li type="circle">org.apache.hadoop.fs.FSDataOutputStream (implements org.apache.hadoop.fs.CanSetDropBehind, org.apache.hadoop.fs.Syncable)
+<li type="circle">org.apache.hadoop.fs.FSDataOutputStream (implements org.apache.hadoop.fs.CanSetDropBehind, org.apache.hadoop.fs.StreamCapabilities, org.apache.hadoop.fs.Syncable)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestHStore.FaultyOutputStream.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestHStore.FaultyOutputStream</span></a></li>
 </ul>
@@ -719,11 +719,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.ActionType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestMultiLogThreshold.ActionType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.Manipulation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DataBlockEncodingTool.Manipulation</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestAtomicOperation.TestStep.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestAtomicOperation.TestStep</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.CacheOnWriteType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestCacheOnWriteInSchema.CacheOnWriteType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.ActionType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestMultiLogThreshold.ActionType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.Metric.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestRegionServerReadRequestMetrics.Metric</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.CacheOnWriteType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestCacheOnWriteInSchema.CacheOnWriteType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TestAtomicOperation.TestStep.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestAtomicOperation.TestStep</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/security/access/TestAccessController.MyShellBasedUnixGroupsMapping.html b/testdevapidocs/org/apache/hadoop/hbase/security/access/TestAccessController.MyShellBasedUnixGroupsMapping.html
index d849994..6d6d849 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/security/access/TestAccessController.MyShellBasedUnixGroupsMapping.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/security/access/TestAccessController.MyShellBasedUnixGroupsMapping.html
@@ -75,7 +75,7 @@ var activeTableTab = "activeTableTab";
 <ul class="subNavList">
 <li>Summary:&nbsp;</li>
 <li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#fields.inherited.from.class.org.apache.hadoop.security.ShellBasedUnixGroupsMapping">Field</a>&nbsp;|&nbsp;</li>
 <li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
 <li><a href="#method.summary">Method</a></li>
 </ul>
@@ -100,6 +100,9 @@ var activeTableTab = "activeTableTab";
 <li><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">java.lang.Object</a></li>
 <li>
 <ul class="inheritance">
+<li>org.apache.hadoop.conf.Configured</li>
+<li>
+<ul class="inheritance">
 <li>org.apache.hadoop.security.ShellBasedUnixGroupsMapping</li>
 <li>
 <ul class="inheritance">
@@ -109,12 +112,14 @@ var activeTableTab = "activeTableTab";
 </ul>
 </li>
 </ul>
+</li>
+</ul>
 <div class="description">
 <ul class="blockList">
 <li class="blockList">
 <dl>
 <dt>All Implemented Interfaces:</dt>
-<dd>org.apache.hadoop.security.GroupMappingServiceProvider</dd>
+<dd>org.apache.hadoop.conf.Configurable, org.apache.hadoop.security.GroupMappingServiceProvider</dd>
 </dl>
 <dl>
 <dt>Enclosing class:</dt>
@@ -138,6 +143,13 @@ implements org.apache.hadoop.security.GroupMappingServiceProvider</pre>
 </a>
 <h3>Field Summary</h3>
 <ul class="blockList">
+<li class="blockList"><a name="fields.inherited.from.class.org.apache.hadoop.security.ShellBasedUnixGroupsMapping">
+<!--   -->
+</a>
+<h3>Fields inherited from class&nbsp;org.apache.hadoop.security.ShellBasedUnixGroupsMapping</h3>
+<code>LOG</code></li>
+</ul>
+<ul class="blockList">
 <li class="blockList"><a name="fields.inherited.from.class.org.apache.hadoop.security.GroupMappingServiceProvider">
 <!--   -->
 </a>
@@ -185,7 +197,14 @@ implements org.apache.hadoop.security.GroupMappingServiceProvider</pre>
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.security.ShellBasedUnixGroupsMapping</h3>
-<code>cacheGroupsAdd, cacheGroupsRefresh, createGroupExecutor, createGroupIDExecutor</code></li>
+<code>cacheGroupsAdd, cacheGroupsRefresh, createGroupExecutor, createGroupIDExecutor, getGroupsForUserCommand, getGroupsIDForUserCommand, resolveFullGroupNames, setConf</code></li>
+</ul>
+<ul class="blockList">
+<li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.conf.Configured">
+<!--   -->
+</a>
+<h3>Methods inherited from class&nbsp;org.apache.hadoop.conf.Configured</h3>
+<code>getConf</code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
@@ -304,7 +323,7 @@ implements org.apache.hadoop.security.GroupMappingServiceProvider</pre>
 <ul class="subNavList">
 <li>Summary:&nbsp;</li>
 <li>Nested&nbsp;|&nbsp;</li>
-<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#fields.inherited.from.class.org.apache.hadoop.security.ShellBasedUnixGroupsMapping">Field</a>&nbsp;|&nbsp;</li>
 <li><a href="#constructor.summary">Constr</a>&nbsp;|&nbsp;</li>
 <li><a href="#method.summary">Method</a></li>
 </ul>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/security/access/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
index 057bd16..3ac1918 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
@@ -87,6 +87,15 @@
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/TestAccessController3.FaultyAccessController.html" title="class in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">TestAccessController3.FaultyAccessController</span></a></li>
 </ul>
 </li>
+<li type="circle">org.apache.hadoop.conf.Configured (implements org.apache.hadoop.conf.Configurable)
+<ul>
+<li type="circle">org.apache.hadoop.security.ShellBasedUnixGroupsMapping (implements org.apache.hadoop.security.GroupMappingServiceProvider)
+<ul>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/TestAccessController.MyShellBasedUnixGroupsMapping.html" title="class in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">TestAccessController.MyShellBasedUnixGroupsMapping</span></a> (implements org.apache.hadoop.security.GroupMappingServiceProvider)</li>
+</ul>
+</li>
+</ul>
+</li>
 <li type="circle">org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingService (implements com.google.protobuf.Service)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/TestAccessController.PingCoprocessor.html" title="class in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">TestAccessController.PingCoprocessor</span></a> (implements org.apache.hadoop.hbase.coprocessor.RegionCoprocessor)</li>
@@ -113,11 +122,6 @@
 </ul>
 </li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SecureTestUtil.MasterSyncObserver.html" title="class in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">SecureTestUtil.MasterSyncObserver</span></a> (implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.hadoop.hbase.coprocessor.MasterObserver)</li>
-<li type="circle">org.apache.hadoop.security.ShellBasedUnixGroupsMapping (implements org.apache.hadoop.security.GroupMappingServiceProvider)
-<ul>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/TestAccessController.MyShellBasedUnixGroupsMapping.html" title="class in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">TestAccessController.MyShellBasedUnixGroupsMapping</span></a> (implements org.apache.hadoop.security.GroupMappingServiceProvider)</li>
-</ul>
-</li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/TestAccessController.BulkLoadAccessTestAction.html" title="class in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">TestAccessController.BulkLoadAccessTestAction</span></a> (implements org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SecureTestUtil.AccessTestAction.html" title="interface  [...]
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/TestAccessController.BulkLoadHelper.html" title="class in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">TestAccessController.BulkLoadHelper</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/TestCoprocessorWhitelistMasterObserver.TestRegionObserver.html" title="class in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">TestCoprocessorWhitelistMasterObserver.TestRegionObserver</span></a> (implements org.apache.hadoop.hbase.coprocessor.RegionCoprocessor, org.apache.hadoop.hbase.coprocessor.RegionObserver)</li>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html
index 242e168..254dc65 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/test/package-tree.html
@@ -253,8 +253,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.Verify.Counts.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestBigLinkedList.Verify.Counts</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.Counters.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestWithCellVisibilityLoadAndVerify.Counters</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.Verify.Counts.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestBigLinkedList.Verify.Counts</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.Counters.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestLoadAndVerify.Counters</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.test.<a href="../../../../../org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.Generator.Counts.html" title="enum in org.apache.hadoop.hbase.test"><span class="typeNameLink">IntegrationTestBigLinkedList.Generator.Counts</span></a></li>
 </ul>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/util/TestFSHDFSUtils.IsFileClosedDistributedFileSystem.html b/testdevapidocs/org/apache/hadoop/hbase/util/TestFSHDFSUtils.IsFileClosedDistributedFileSystem.html
index 930f2cb..db6e406 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/util/TestFSHDFSUtils.IsFileClosedDistributedFileSystem.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/util/TestFSHDFSUtils.IsFileClosedDistributedFileSystem.html
@@ -152,14 +152,14 @@ extends org.apache.hadoop.hdfs.DistributedFileSystem</pre>
 <!--   -->
 </a>
 <h3>Nested classes/interfaces inherited from class&nbsp;org.apache.hadoop.hdfs.DistributedFileSystem</h3>
-<code>org.apache.hadoop.hdfs.DistributedFileSystem.DiskStatus</code></li>
+<code>org.apache.hadoop.hdfs.DistributedFileSystem.DiskStatus, org.apache.hadoop.hdfs.DistributedFileSystem.HdfsDataOutputStreamBuilder</code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="nested.classes.inherited.from.class.org.apache.hadoop.fs.FileSystem">
 <!--   -->
 </a>
 <h3>Nested classes/interfaces inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>org.apache.hadoop.fs.FileSystem.Statistics</code></li>
+<code>org.apache.hadoop.fs.FileSystem.DirectoryEntries, org.apache.hadoop.fs.FileSystem.Statistics</code></li>
 </ul>
 </li>
 </ul>
@@ -174,7 +174,7 @@ extends org.apache.hadoop.hdfs.DistributedFileSystem</pre>
 <!--   -->
 </a>
 <h3>Fields inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, LOG, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX</code></li>
+<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, LOG, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX, USER_HOME_PREFIX</code></li>
 </ul>
 </li>
 </ul>
@@ -219,14 +219,14 @@ extends org.apache.hadoop.hdfs.DistributedFileSystem</pre>
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.hdfs.DistributedFileSystem</h3>
-<code>access, addCacheDirective, addCacheDirective, addCachePool, addDelegationTokens, allowSnapshot, append, append, append, canonicalizeUri, close, concat, create, create, create, createEncryptionZone, createNonRecursive, createSnapshot, createSymlink, delete, deleteSnapshot, disallowSnapshot, finalizeUpgrade, fixRelativePart, getAclStatus, getAllStoragePolicies, getBytesWithFutureGenerationStamps, getCanonicalServiceName, getClient, getContentSummary, getCorruptBlocksCount, getDataNod [...]
+<code>access, addCacheDirective, addCacheDirective, addCachePool, addDelegationTokens, allowSnapshot, append, append, append, appendFile, canonicalizeUri, close, concat, create, create, create, createEncryptionZone, createFile, createNonRecursive, createSnapshot, createSymlink, delete, deleteSnapshot, disallowSnapshot, finalizeUpgrade, fixRelativePart, getAclStatus, getAllStoragePolicies, getBytesWithFutureGenerationStamps, getCanonicalServiceName, getClient, getContentSummary, getCorrup [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.fs.FileSystem">
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>append, append, areSymlinksEnabled, cancelDeleteOnExit, checkPath, clearStatistics, closeAll, closeAllForUGI, completeLocalOutput, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, create, createNewFile, createNonRecursive, createNonRecursive, createSnapshot, delete, deleteOnExit, enableSymlinks, exists, get, get, get, getAllSt [...]
+<code>append, append, areSymlinksEnabled, cancelDeleteOnExit, checkPath, clearStatistics, closeAll, closeAllForUGI, completeLocalOutput, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, create, createNewFile, createNonRecursive, createNonRecursive, createSnapshot, delete, deleteOnExit, enableSymlinks, exists, get, get, get, getAllSt [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.conf.Configured">
diff --git a/testdevapidocs/org/apache/hadoop/hbase/util/TestFSUtils.AlwaysFailSetStoragePolicyFileSystem.html b/testdevapidocs/org/apache/hadoop/hbase/util/TestFSUtils.AlwaysFailSetStoragePolicyFileSystem.html
index 6c9d8b1..76646df 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/util/TestFSUtils.AlwaysFailSetStoragePolicyFileSystem.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/util/TestFSUtils.AlwaysFailSetStoragePolicyFileSystem.html
@@ -151,14 +151,14 @@ extends org.apache.hadoop.hdfs.DistributedFileSystem</pre>
 <!--   -->
 </a>
 <h3>Nested classes/interfaces inherited from class&nbsp;org.apache.hadoop.hdfs.DistributedFileSystem</h3>
-<code>org.apache.hadoop.hdfs.DistributedFileSystem.DiskStatus</code></li>
+<code>org.apache.hadoop.hdfs.DistributedFileSystem.DiskStatus, org.apache.hadoop.hdfs.DistributedFileSystem.HdfsDataOutputStreamBuilder</code></li>
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="nested.classes.inherited.from.class.org.apache.hadoop.fs.FileSystem">
 <!--   -->
 </a>
 <h3>Nested classes/interfaces inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>org.apache.hadoop.fs.FileSystem.Statistics</code></li>
+<code>org.apache.hadoop.fs.FileSystem.DirectoryEntries, org.apache.hadoop.fs.FileSystem.Statistics</code></li>
 </ul>
 </li>
 </ul>
@@ -173,7 +173,7 @@ extends org.apache.hadoop.hdfs.DistributedFileSystem</pre>
 <!--   -->
 </a>
 <h3>Fields inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, LOG, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX</code></li>
+<code>DEFAULT_FS, FS_DEFAULT_NAME_KEY, LOG, SHUTDOWN_HOOK_PRIORITY, statistics, TRASH_PREFIX, USER_HOME_PREFIX</code></li>
 </ul>
 </li>
 </ul>
@@ -217,14 +217,14 @@ extends org.apache.hadoop.hdfs.DistributedFileSystem</pre>
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.hdfs.DistributedFileSystem</h3>
-<code>access, addCacheDirective, addCacheDirective, addCachePool, addDelegationTokens, allowSnapshot, append, append, append, canonicalizeUri, close, concat, create, create, create, createEncryptionZone, createNonRecursive, createSnapshot, createSymlink, delete, deleteSnapshot, disallowSnapshot, finalizeUpgrade, fixRelativePart, getAclStatus, getAllStoragePolicies, getBytesWithFutureGenerationStamps, getCanonicalServiceName, getClient, getContentSummary, getCorruptBlocksCount, getDataNod [...]
+<code>access, addCacheDirective, addCacheDirective, addCachePool, addDelegationTokens, allowSnapshot, append, append, append, appendFile, canonicalizeUri, close, concat, create, create, create, createEncryptionZone, createFile, createNonRecursive, createSnapshot, createSymlink, delete, deleteSnapshot, disallowSnapshot, finalizeUpgrade, fixRelativePart, getAclStatus, getAllStoragePolicies, getBytesWithFutureGenerationStamps, getCanonicalServiceName, getClient, getContentSummary, getCorrup [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.fs.FileSystem">
 <!--   -->
 </a>
 <h3>Methods inherited from class&nbsp;org.apache.hadoop.fs.FileSystem</h3>
-<code>append, append, areSymlinksEnabled, cancelDeleteOnExit, checkPath, clearStatistics, closeAll, closeAllForUGI, completeLocalOutput, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, create, createNewFile, createNonRecursive, createNonRecursive, createSnapshot, delete, deleteOnExit, enableSymlinks, exists, get, get, get, getAllSt [...]
+<code>append, append, areSymlinksEnabled, cancelDeleteOnExit, checkPath, clearStatistics, closeAll, closeAllForUGI, completeLocalOutput, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyFromLocalFile, copyToLocalFile, copyToLocalFile, copyToLocalFile, create, create, create, create, create, create, create, create, create, create, create, createNewFile, createNonRecursive, createNonRecursive, createSnapshot, delete, deleteOnExit, enableSymlinks, exists, get, get, get, getAllSt [...]
 </ul>
 <ul class="blockList">
 <li class="blockList"><a name="methods.inherited.from.class.org.apache.hadoop.conf.Configured">
diff --git a/testdevapidocs/org/apache/hadoop/hbase/util/TestFutureUtils.html b/testdevapidocs/org/apache/hadoop/hbase/util/TestFutureUtils.html
index 891f696..463118b 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/util/TestFutureUtils.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/util/TestFutureUtils.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 <li class="blockList">
 <hr>
 <br>
-<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html#line.44">TestFutureUtils</a>
+<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html#line.45">TestFutureUtils</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -209,7 +209,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CLASS_RULE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html#line.47">CLASS_RULE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html#line.48">CLASS_RULE</a></pre>
 </li>
 </ul>
 <a name="executor">
@@ -218,7 +218,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>executor</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true" title="class or interface in java.util.concurrent">ExecutorService</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html#line.50">executor</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true" title="class or interface in java.util.concurrent">ExecutorService</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html#line.51">executor</a></pre>
 </li>
 </ul>
 </li>
@@ -235,7 +235,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TestFutureUtils</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html#line.44">TestFutureUtils</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html#line.45">TestFutureUtils</a>()</pre>
 </li>
 </ul>
 </li>
@@ -252,7 +252,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setUp</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html#line.53">setUp</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html#line.54">setUp</a>()</pre>
 </li>
 </ul>
 <a name="tearDown--">
@@ -261,7 +261,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tearDown</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html#line.58">tearDown</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html#line.59">tearDown</a>()</pre>
 </li>
 </ul>
 <a name="testRecordStackTrace--">
@@ -270,7 +270,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>testRecordStackTrace</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html#line.63">testRecordStackTrace</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html#line.64">testRecordStackTrace</a>()
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
index da87dd9..51e93c5 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
@@ -163,8 +163,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/TestWALSplit.Corruptions.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">TestWALSplit.Corruptions</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/FaultyFSLog.FailureType.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">FaultyFSLog.FailureType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/TestWALSplit.Corruptions.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">TestWALSplit.Corruptions</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/IOTestProvider.AllowedOperations.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">IOTestProvider.AllowedOperations</span></a></li>
 </ul>
 </li>
diff --git a/testdevapidocs/overview-tree.html b/testdevapidocs/overview-tree.html
index 69bb4a7..1f603eb 100644
--- a/testdevapidocs/overview-tree.html
+++ b/testdevapidocs/overview-tree.html
@@ -1034,6 +1034,12 @@
 </ul>
 </li>
 <li type="circle">org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/RESTApiClusterManager.html" title="class in org.apache.hadoop.hbase"><span class="typeNameLink">RESTApiClusterManager</span></a> (implements org.apache.hadoop.hbase.<a href="org/apache/hadoop/hbase/ClusterManager.html" title="interface in org.apache.hadoop.hbase">ClusterManager</a>)</li>
+<li type="circle">org.apache.hadoop.security.ShellBasedUnixGroupsMapping (implements org.apache.hadoop.security.GroupMappingServiceProvider)
+<ul>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="org/apache/hadoop/hbase/security/access/TestAccessController.MyShellBasedUnixGroupsMapping.html" title="class in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">TestAccessController.MyShellBasedUnixGroupsMapping</span></a> (implements org.apache.hadoop.security.GroupMappingServiceProvider)</li>
+<li type="circle">org.apache.hadoop.hbase.http.<a href="org/apache/hadoop/hbase/http/TestHttpServer.MyGroupsProvider.html" title="class in org.apache.hadoop.hbase.http"><span class="typeNameLink">TestHttpServer.MyGroupsProvider</span></a></li>
+</ul>
+</li>
 <li type="circle">org.apache.hadoop.hbase.wal.<a href="org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.html" title="class in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALPerformanceEvaluation</span></a> (implements org.apache.hadoop.util.Tool)</li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.WALPlayer (implements org.apache.hadoop.util.Tool)
 <ul>
@@ -1449,7 +1455,7 @@
 <ul>
 <li type="circle">java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInputStream.html?is-external=true" title="class or interface in java.io"><span class="typeNameLink">DataInputStream</span></a> (implements java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</a>)
 <ul>
-<li type="circle">org.apache.hadoop.fs.FSDataInputStream (implements org.apache.hadoop.fs.ByteBufferReadable, org.apache.hadoop.fs.CanSetDropBehind, org.apache.hadoop.fs.CanSetReadahead, org.apache.hadoop.fs.CanUnbuffer, org.apache.hadoop.fs.HasEnhancedByteBufferAccess, org.apache.hadoop.fs.HasFileDescriptor, org.apache.hadoop.fs.PositionedReadable, org.apache.hadoop.fs.Seekable)
+<li type="circle">org.apache.hadoop.fs.FSDataInputStream (implements org.apache.hadoop.fs.ByteBufferPositionedReadable, org.apache.hadoop.fs.ByteBufferReadable, org.apache.hadoop.fs.CanSetDropBehind, org.apache.hadoop.fs.CanSetReadahead, org.apache.hadoop.fs.CanUnbuffer, org.apache.hadoop.fs.HasEnhancedByteBufferAccess, org.apache.hadoop.fs.HasFileDescriptor, org.apache.hadoop.fs.PositionedReadable, org.apache.hadoop.fs.Seekable, org.apache.hadoop.fs.StreamCapabilities)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.FaultyInputStream.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestFSErrorsExposed.FaultyInputStream</span></a></li>
 </ul>
@@ -1795,7 +1801,7 @@
 <ul>
 <li type="circle">java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutputStream.html?is-external=true" title="class or interface in java.io"><span class="typeNameLink">DataOutputStream</span></a> (implements java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/DataOutput.html?is-external=true" title="class or interface in java.io">DataOutput</a>)
 <ul>
-<li type="circle">org.apache.hadoop.fs.FSDataOutputStream (implements org.apache.hadoop.fs.CanSetDropBehind, org.apache.hadoop.fs.Syncable)
+<li type="circle">org.apache.hadoop.fs.FSDataOutputStream (implements org.apache.hadoop.fs.CanSetDropBehind, org.apache.hadoop.fs.StreamCapabilities, org.apache.hadoop.fs.Syncable)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/TestHStore.FaultyOutputStream.html" title="class in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TestHStore.FaultyOutputStream</span></a></li>
 </ul>
@@ -2446,12 +2452,6 @@
 </li>
 </ul>
 </li>
-<li type="circle">org.apache.hadoop.security.ShellBasedUnixGroupsMapping (implements org.apache.hadoop.security.GroupMappingServiceProvider)
-<ul>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="org/apache/hadoop/hbase/security/access/TestAccessController.MyShellBasedUnixGroupsMapping.html" title="class in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">TestAccessController.MyShellBasedUnixGroupsMapping</span></a> (implements org.apache.hadoop.security.GroupMappingServiceProvider)</li>
-<li type="circle">org.apache.hadoop.hbase.http.<a href="org/apache/hadoop/hbase/http/TestHttpServer.MyGroupsProvider.html" title="class in org.apache.hadoop.hbase.http"><span class="typeNameLink">TestHttpServer.MyGroupsProvider</span></a></li>
-</ul>
-</li>
 <li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.html" title="class in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">SimpleRegionObserver</span></a> (implements org.apache.hadoop.hbase.coprocessor.RegionCoprocessor, org.apache.hadoop.hbase.coprocessor.RegionObserver)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="org/apache/hadoop/hbase/coprocessor/TestCoprocessorHost.SimpleRegionObserverV2.html" title="class in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">TestCoprocessorHost.SimpleRegionObserverV2</span></a></li>
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/http/log/TestLogLevel.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/http/log/TestLogLevel.html
index c5b9537..dd868a4 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/http/log/TestLogLevel.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/http/log/TestLogLevel.html
@@ -488,7 +488,7 @@
 <span class="sourceLineNo">480</span>    Throwable t = throwable;<a name="line.480"></a>
 <span class="sourceLineNo">481</span>    while (t != null) {<a name="line.481"></a>
 <span class="sourceLineNo">482</span>      String msg = t.toString();<a name="line.482"></a>
-<span class="sourceLineNo">483</span>      if (msg != null &amp;&amp; msg.contains(substr)) {<a name="line.483"></a>
+<span class="sourceLineNo">483</span>      if (msg != null &amp;&amp; msg.toLowerCase().contains(substr.toLowerCase())) {<a name="line.483"></a>
 <span class="sourceLineNo">484</span>        return;<a name="line.484"></a>
 <span class="sourceLineNo">485</span>      }<a name="line.485"></a>
 <span class="sourceLineNo">486</span>      t = t.getCause();<a name="line.486"></a>
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html
index d1302b8..1bffafb 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/util/TestFutureUtils.html
@@ -25,74 +25,75 @@
 <span class="sourceLineNo">017</span> */<a name="line.17"></a>
 <span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.util;<a name="line.18"></a>
 <span class="sourceLineNo">019</span><a name="line.19"></a>
-<span class="sourceLineNo">020</span>import static org.hamcrest.CoreMatchers.startsWith;<a name="line.20"></a>
-<span class="sourceLineNo">021</span>import static org.junit.Assert.assertEquals;<a name="line.21"></a>
-<span class="sourceLineNo">022</span>import static org.junit.Assert.assertThat;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import static org.junit.Assert.assertTrue;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import static org.junit.Assert.fail;<a name="line.24"></a>
-<span class="sourceLineNo">025</span><a name="line.25"></a>
-<span class="sourceLineNo">026</span>import java.io.IOException;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import java.util.concurrent.CompletableFuture;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import java.util.concurrent.ExecutorService;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.util.concurrent.Executors;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import java.util.stream.Stream;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.testclassification.MiscTests;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.testclassification.SmallTests;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.junit.After;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.junit.Before;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.junit.ClassRule;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.junit.Test;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.junit.experimental.categories.Category;<a name="line.39"></a>
-<span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.41"></a>
-<span class="sourceLineNo">042</span><a name="line.42"></a>
-<span class="sourceLineNo">043</span>@Category({ MiscTests.class, SmallTests.class })<a name="line.43"></a>
-<span class="sourceLineNo">044</span>public class TestFutureUtils {<a name="line.44"></a>
-<span class="sourceLineNo">045</span><a name="line.45"></a>
-<span class="sourceLineNo">046</span>  @ClassRule<a name="line.46"></a>
-<span class="sourceLineNo">047</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.47"></a>
-<span class="sourceLineNo">048</span>    HBaseClassTestRule.forClass(TestFutureUtils.class);<a name="line.48"></a>
-<span class="sourceLineNo">049</span><a name="line.49"></a>
-<span class="sourceLineNo">050</span>  private ExecutorService executor;<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>  @Before<a name="line.52"></a>
-<span class="sourceLineNo">053</span>  public void setUp() {<a name="line.53"></a>
-<span class="sourceLineNo">054</span>    executor = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder().setDaemon(true).build());<a name="line.54"></a>
-<span class="sourceLineNo">055</span>  }<a name="line.55"></a>
-<span class="sourceLineNo">056</span><a name="line.56"></a>
-<span class="sourceLineNo">057</span>  @After<a name="line.57"></a>
-<span class="sourceLineNo">058</span>  public void tearDown() {<a name="line.58"></a>
-<span class="sourceLineNo">059</span>    executor.shutdownNow();<a name="line.59"></a>
-<span class="sourceLineNo">060</span>  }<a name="line.60"></a>
-<span class="sourceLineNo">061</span><a name="line.61"></a>
-<span class="sourceLineNo">062</span>  @Test<a name="line.62"></a>
-<span class="sourceLineNo">063</span>  public void testRecordStackTrace() throws IOException {<a name="line.63"></a>
-<span class="sourceLineNo">064</span>    CompletableFuture&lt;Void&gt; future = new CompletableFuture&lt;&gt;();<a name="line.64"></a>
-<span class="sourceLineNo">065</span>    executor.execute(() -&gt; future.completeExceptionally(new HBaseIOException("Inject error!")));<a name="line.65"></a>
-<span class="sourceLineNo">066</span>    try {<a name="line.66"></a>
-<span class="sourceLineNo">067</span>      FutureUtils.get(future);<a name="line.67"></a>
-<span class="sourceLineNo">068</span>      fail("The future should have been completed exceptionally");<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    } catch (HBaseIOException e) {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>      assertEquals("Inject error!", e.getMessage());<a name="line.70"></a>
-<span class="sourceLineNo">071</span>      StackTraceElement[] elements = e.getStackTrace();<a name="line.71"></a>
-<span class="sourceLineNo">072</span>      assertThat(elements[0].toString(), startsWith("java.lang.Thread.getStackTrace"));<a name="line.72"></a>
-<span class="sourceLineNo">073</span>      assertThat(elements[1].toString(),<a name="line.73"></a>
-<span class="sourceLineNo">074</span>        startsWith("org.apache.hadoop.hbase.util.FutureUtils.setStackTrace"));<a name="line.74"></a>
-<span class="sourceLineNo">075</span>      assertThat(elements[2].toString(),<a name="line.75"></a>
-<span class="sourceLineNo">076</span>        startsWith("org.apache.hadoop.hbase.util.FutureUtils.rethrow"));<a name="line.76"></a>
-<span class="sourceLineNo">077</span>      assertThat(elements[3].toString(),<a name="line.77"></a>
-<span class="sourceLineNo">078</span>        startsWith("org.apache.hadoop.hbase.util.FutureUtils.get"));<a name="line.78"></a>
-<span class="sourceLineNo">079</span>      assertThat(elements[4].toString(),<a name="line.79"></a>
-<span class="sourceLineNo">080</span>        startsWith("org.apache.hadoop.hbase.util.TestFutureUtils.testRecordStackTrace"));<a name="line.80"></a>
-<span class="sourceLineNo">081</span>      assertTrue(Stream.of(elements)<a name="line.81"></a>
-<span class="sourceLineNo">082</span>        .anyMatch(element -&gt; element.toString().contains("--------Future.get--------")));<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    } catch (Throwable t) {<a name="line.83"></a>
-<span class="sourceLineNo">084</span>      throw new AssertionError("Caught unexpected Throwable", t);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>    }<a name="line.85"></a>
-<span class="sourceLineNo">086</span>  }<a name="line.86"></a>
-<span class="sourceLineNo">087</span>}<a name="line.87"></a>
+<span class="sourceLineNo">020</span>import static org.hamcrest.CoreMatchers.containsString;<a name="line.20"></a>
+<span class="sourceLineNo">021</span>import static org.hamcrest.CoreMatchers.startsWith;<a name="line.21"></a>
+<span class="sourceLineNo">022</span>import static org.junit.Assert.assertEquals;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import static org.junit.Assert.assertThat;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import static org.junit.Assert.assertTrue;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import static org.junit.Assert.fail;<a name="line.25"></a>
+<span class="sourceLineNo">026</span><a name="line.26"></a>
+<span class="sourceLineNo">027</span>import java.io.IOException;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import java.util.concurrent.CompletableFuture;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import java.util.concurrent.ExecutorService;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import java.util.concurrent.Executors;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import java.util.stream.Stream;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.testclassification.MiscTests;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.testclassification.SmallTests;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.junit.After;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.junit.Before;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.junit.ClassRule;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.junit.Test;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.junit.experimental.categories.Category;<a name="line.40"></a>
+<span class="sourceLineNo">041</span><a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;<a name="line.42"></a>
+<span class="sourceLineNo">043</span><a name="line.43"></a>
+<span class="sourceLineNo">044</span>@Category({ MiscTests.class, SmallTests.class })<a name="line.44"></a>
+<span class="sourceLineNo">045</span>public class TestFutureUtils {<a name="line.45"></a>
+<span class="sourceLineNo">046</span><a name="line.46"></a>
+<span class="sourceLineNo">047</span>  @ClassRule<a name="line.47"></a>
+<span class="sourceLineNo">048</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.48"></a>
+<span class="sourceLineNo">049</span>    HBaseClassTestRule.forClass(TestFutureUtils.class);<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>  private ExecutorService executor;<a name="line.51"></a>
+<span class="sourceLineNo">052</span><a name="line.52"></a>
+<span class="sourceLineNo">053</span>  @Before<a name="line.53"></a>
+<span class="sourceLineNo">054</span>  public void setUp() {<a name="line.54"></a>
+<span class="sourceLineNo">055</span>    executor = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder().setDaemon(true).build());<a name="line.55"></a>
+<span class="sourceLineNo">056</span>  }<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>  @After<a name="line.58"></a>
+<span class="sourceLineNo">059</span>  public void tearDown() {<a name="line.59"></a>
+<span class="sourceLineNo">060</span>    executor.shutdownNow();<a name="line.60"></a>
+<span class="sourceLineNo">061</span>  }<a name="line.61"></a>
+<span class="sourceLineNo">062</span><a name="line.62"></a>
+<span class="sourceLineNo">063</span>  @Test<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  public void testRecordStackTrace() throws IOException {<a name="line.64"></a>
+<span class="sourceLineNo">065</span>    CompletableFuture&lt;Void&gt; future = new CompletableFuture&lt;&gt;();<a name="line.65"></a>
+<span class="sourceLineNo">066</span>    executor.execute(() -&gt; future.completeExceptionally(new HBaseIOException("Inject error!")));<a name="line.66"></a>
+<span class="sourceLineNo">067</span>    try {<a name="line.67"></a>
+<span class="sourceLineNo">068</span>      FutureUtils.get(future);<a name="line.68"></a>
+<span class="sourceLineNo">069</span>      fail("The future should have been completed exceptionally");<a name="line.69"></a>
+<span class="sourceLineNo">070</span>    } catch (HBaseIOException e) {<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      assertEquals("Inject error!", e.getMessage());<a name="line.71"></a>
+<span class="sourceLineNo">072</span>      StackTraceElement[] elements = e.getStackTrace();<a name="line.72"></a>
+<span class="sourceLineNo">073</span>      assertThat(elements[0].toString(), containsString("java.lang.Thread.getStackTrace"));<a name="line.73"></a>
+<span class="sourceLineNo">074</span>      assertThat(elements[1].toString(),<a name="line.74"></a>
+<span class="sourceLineNo">075</span>        startsWith("org.apache.hadoop.hbase.util.FutureUtils.setStackTrace"));<a name="line.75"></a>
+<span class="sourceLineNo">076</span>      assertThat(elements[2].toString(),<a name="line.76"></a>
+<span class="sourceLineNo">077</span>        startsWith("org.apache.hadoop.hbase.util.FutureUtils.rethrow"));<a name="line.77"></a>
+<span class="sourceLineNo">078</span>      assertThat(elements[3].toString(),<a name="line.78"></a>
+<span class="sourceLineNo">079</span>        startsWith("org.apache.hadoop.hbase.util.FutureUtils.get"));<a name="line.79"></a>
+<span class="sourceLineNo">080</span>      assertThat(elements[4].toString(),<a name="line.80"></a>
+<span class="sourceLineNo">081</span>        startsWith("org.apache.hadoop.hbase.util.TestFutureUtils.testRecordStackTrace"));<a name="line.81"></a>
+<span class="sourceLineNo">082</span>      assertTrue(Stream.of(elements)<a name="line.82"></a>
+<span class="sourceLineNo">083</span>        .anyMatch(element -&gt; element.toString().contains("--------Future.get--------")));<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    } catch (Throwable t) {<a name="line.84"></a>
+<span class="sourceLineNo">085</span>      throw new AssertionError("Caught unexpected Throwable", t);<a name="line.85"></a>
+<span class="sourceLineNo">086</span>    }<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  }<a name="line.87"></a>
+<span class="sourceLineNo">088</span>}<a name="line.88"></a>