Posted to commits@hbase.apache.org by gi...@apache.org on 2020/06/22 14:46:50 UTC

[hbase-site] branch asf-site updated: Published site at 2b5ea44978f8bf48bdc62c2b69dc5c11c68d3720.

This is an automated email from the ASF dual-hosted git repository.

git-site-role pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/hbase-site.git


The following commit(s) were added to refs/heads/asf-site by this push:
     new 88b904b  Published site at 2b5ea44978f8bf48bdc62c2b69dc5c11c68d3720.
88b904b is described below

commit 88b904b37b9809c653a427d47ce721e20e672a39
Author: jenkins <bu...@apache.org>
AuthorDate: Mon Jun 22 14:46:34 2020 +0000

    Published site at 2b5ea44978f8bf48bdc62c2b69dc5c11c68d3720.
---
 acid-semantics.html                                |    2 +-
 apache_hbase_reference_guide.pdf                   |    4 +-
 apidocs/deprecated-list.html                       |   14 +-
 apidocs/index-all.html                             |    7 +
 .../apache/hadoop/hbase/class-use/TableName.html   |   13 +
 .../hadoop/hbase/client/SnapshotDescription.html   |   72 +-
 .../hbase/client/class-use/SnapshotType.html       |   13 +
 .../hadoop/hbase/client/SnapshotDescription.html   |  259 +--
 book.html                                          |    2 +-
 bulk-loads.html                                    |    2 +-
 checkstyle-aggregate.html                          |    8 +-
 coc.html                                           |    2 +-
 dependencies.html                                  |    2 +-
 dependency-convergence.html                        |    2 +-
 dependency-info.html                               |    2 +-
 dependency-management.html                         |    2 +-
 devapidocs/constant-values.html                    |   11 +-
 devapidocs/deprecated-list.html                    |   18 +-
 devapidocs/index-all.html                          |   17 +-
 .../apache/hadoop/hbase/backup/package-tree.html   |    4 +-
 .../apache/hadoop/hbase/class-use/TableName.html   |   25 +-
 .../hadoop/hbase/client/SnapshotDescription.html   |   88 +-
 .../hbase/client/class-use/SnapshotType.html       |   13 +
 .../apache/hadoop/hbase/client/package-tree.html   |   20 +-
 .../hadoop/hbase/coprocessor/package-tree.html     |    2 +-
 .../apache/hadoop/hbase/filter/package-tree.html   |    6 +-
 .../hadoop/hbase/hbtop/field/package-tree.html     |    2 +-
 .../hadoop/hbase/hbtop/terminal/package-tree.html  |    2 +-
 .../org/apache/hadoop/hbase/http/package-tree.html |    2 +-
 .../apache/hadoop/hbase/io/hfile/package-tree.html |    4 +-
 .../org/apache/hadoop/hbase/ipc/package-tree.html  |    4 +-
 .../hadoop/hbase/mapreduce/package-tree.html       |    4 +-
 .../hadoop/hbase/master/balancer/package-tree.html |    2 +-
 .../apache/hadoop/hbase/master/package-tree.html   |    6 +-
 .../hbase/master/procedure/package-tree.html       |    4 +-
 .../hadoop/hbase/monitoring/package-tree.html      |    2 +-
 .../org/apache/hadoop/hbase/package-tree.html      |   16 +-
 .../hadoop/hbase/procedure2/package-tree.html      |    4 +-
 .../hbase/procedure2/store/wal/package-tree.html   |    2 +-
 .../apache/hadoop/hbase/quotas/package-tree.html   |    6 +-
 .../hadoop/hbase/regionserver/package-tree.html    |   22 +-
 .../regionserver/querymatcher/package-tree.html    |    4 +-
 .../hbase/regionserver/wal/package-tree.html       |    2 +-
 .../hadoop/hbase/replication/package-tree.html     |    2 +-
 .../replication/regionserver/package-tree.html     |    2 +-
 .../hadoop/hbase/security/access/package-tree.html |    6 +-
 .../hbase/security/class-use/UserProvider.html     |    5 -
 .../apache/hadoop/hbase/security/package-tree.html |    4 +-
 .../hbase/security/token/FsDelegationToken.html    |   67 +-
 .../apache/hadoop/hbase/thrift/package-tree.html   |    2 +-
 .../tool/BulkLoadHFilesTool.BulkHFileVisitor.html  |    6 +-
 .../hadoop/hbase/tool/BulkLoadHFilesTool.html      |  152 +-
 .../org/apache/hadoop/hbase/util/package-tree.html |   12 +-
 .../org/apache/hadoop/hbase/wal/package-tree.html  |    2 +-
 .../hadoop/hbase/client/SnapshotDescription.html   |  259 +--
 .../hbase/security/token/FsDelegationToken.html    |  228 ++-
 .../tool/BulkLoadHFilesTool.BulkHFileVisitor.html  | 1954 ++++++++++----------
 .../hadoop/hbase/tool/BulkLoadHFilesTool.html      | 1954 ++++++++++----------
 downloads.html                                     |    2 +-
 export_control.html                                |    2 +-
 index.html                                         |    2 +-
 issue-tracking.html                                |    2 +-
 mail-lists.html                                    |    2 +-
 metrics.html                                       |    2 +-
 old_news.html                                      |    2 +-
 plugin-management.html                             |    2 +-
 plugins.html                                       |    2 +-
 poweredbyhbase.html                                |    2 +-
 project-info.html                                  |    2 +-
 project-reports.html                               |    2 +-
 project-summary.html                               |    2 +-
 pseudo-distributed.html                            |    2 +-
 replication.html                                   |    2 +-
 resources.html                                     |    2 +-
 source-repository.html                             |    2 +-
 sponsors.html                                      |    2 +-
 supportingprojects.html                            |    2 +-
 team-list.html                                     |    2 +-
 .../security/token/TestFsDelegationToken.html      |   38 +-
 .../security/token/TestFsDelegationToken.html      |  224 +--
 80 files changed, 2846 insertions(+), 2809 deletions(-)
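
For readers skimming the diff below: the substantive API change being published is a newly documented, deprecated 6-argument SnapshotDescription constructor that forwards to the existing 7-argument variant taking a snapshotProps map (the map is where properties such as "TTL" are supplied, read via MapUtils.getLongValue(snapshotProps, "TTL", -1)). A minimal sketch of the two forms follows; the table name, snapshot name, owner and FLUSH snapshot type are illustrative values that do not appear in this commit.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.SnapshotDescription;
import org.apache.hadoop.hbase.client.SnapshotType;

public class SnapshotDescriptionTtlExample {
  @SuppressWarnings("deprecation")
  public static void main(String[] args) {
    TableName table = TableName.valueOf("example_table"); // hypothetical table name

    // Deprecated 6-argument form: no way to pass snapshot properties such as TTL.
    SnapshotDescription legacy =
        new SnapshotDescription("snap1", table, SnapshotType.FLUSH, "owner", -1, -1);

    // Replacement 7-argument form: snapshotProps carries extra properties, e.g. a
    // "TTL" entry in seconds, which the constructor reads into getTtl().
    Map<String, Object> props = new HashMap<>();
    props.put("TTL", 86400L);
    SnapshotDescription current =
        new SnapshotDescription("snap1", table, SnapshotType.FLUSH, "owner", -1, -1, props);

    System.out.println(legacy.getTtl());  // -1 (no properties supplied)
    System.out.println(current.getTtl()); // 86400
  }
}

Per the deprecation note in the generated Javadoc, the 6-argument form is deprecated since 2.3.0 and slated for removal in 4.0.0; the snapshotProps map is the supported way to attach snapshot properties.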

diff --git a/acid-semantics.html b/acid-semantics.html
index 8ff0d2d..f3b15b2 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -467,7 +467,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index d57fb1b..ba8656d 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.rc.2, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20200621143144+00'00')
-/CreationDate (D:20200621144306+00'00')
+/ModDate (D:20200622143155+00'00')
+/CreationDate (D:20200622144258+00'00')
 >>
 endobj
 2 0 obj
diff --git a/apidocs/deprecated-list.html b/apidocs/deprecated-list.html
index 1580ed9..142e56d 100644
--- a/apidocs/deprecated-list.html
+++ b/apidocs/deprecated-list.html
@@ -976,30 +976,36 @@
 </td>
 </tr>
 <tr class="altColor">
+<td class="colOne"><a href="org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-">org.apache.hadoop.hbase.client.SnapshotDescription(String, TableName, SnapshotType, String, long, int)</a>
+<div class="block"><span class="deprecationComment">since 2.3.0 and will be removed in 4.0.0. Use
+   <a href="org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-"><code>SnapshotDescription.SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map)</code></a></span></div>
+</td>
+</tr>
+<tr class="rowColor">
 <td class="colOne"><a href="org/apache/hadoop/hbase/io/TimeRange.html#TimeRange--">org.apache.hadoop.hbase.io.TimeRange()</a>
 <div class="block"><span class="deprecationComment">This is made @InterfaceAudience.Private in the 2.0 line and above and may be
  changed to private or removed in 3.0.</span></div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="org/apache/hadoop/hbase/io/TimeRange.html#TimeRange-byte:A-">org.apache.hadoop.hbase.io.TimeRange(byte[])</a>
 <div class="block"><span class="deprecationComment">This is made @InterfaceAudience.Private in the 2.0 line and above and may be
  changed to private or removed in 3.0.</span></div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="org/apache/hadoop/hbase/io/TimeRange.html#TimeRange-byte:A-byte:A-">org.apache.hadoop.hbase.io.TimeRange(byte[], byte[])</a>
 <div class="block"><span class="deprecationComment">This is made @InterfaceAudience.Private in the 2.0 line and above and may be
  changed to private or removed in 3.0.</span></div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="org/apache/hadoop/hbase/io/TimeRange.html#TimeRange-long-">org.apache.hadoop.hbase.io.TimeRange(long)</a>
 <div class="block"><span class="deprecationComment">This is made @InterfaceAudience.Private in the 2.0 line and above and may be
  changed to private or removed in 3.0.</span></div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="org/apache/hadoop/hbase/io/TimeRange.html#TimeRange-long-long-">org.apache.hadoop.hbase.io.TimeRange(long, long)</a>
 <div class="block"><span class="deprecationComment">This is made @InterfaceAudience.Private in the 2.0 line and above and may be
  changed to private or removed in 3.0.</span></div>
diff --git a/apidocs/index-all.html b/apidocs/index-all.html
index bc2f8e6..2915cc6 100644
--- a/apidocs/index-all.html
+++ b/apidocs/index-all.html
@@ -15971,6 +15971,13 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-">SnapshotDescription(String, TableName, SnapshotType, String)</a></span> - Constructor for class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/SnapshotDescription.html" title="class in org.apache.hadoop.hbase.client">SnapshotDescription [...]
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-">SnapshotDescription(String, TableName, SnapshotType, String, long, int)</a></span> - Constructor for class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/SnapshotDescription.html" title="class in org.apache.hadoop.hbase.client" [...]
+<dd>
+<div class="block"><span class="deprecatedLabel">Deprecated.</span>
+<div class="block"><span class="deprecationComment">since 2.3.0 and will be removed in 4.0.0. Use
+   <a href="org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-"><code>SnapshotDescription.SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map)</code></a></span></div>
+</div>
+</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-">SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map&lt;String, Object&gt;)</a></span> - Constructor for class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/SnapshotDescription.html" title= [...]
 <dd>
 <div class="block">SnapshotDescription Parameterized Constructor</div>
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 0d03f09..a1a05dd 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -1313,6 +1313,19 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.</div>
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner)</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-">SnapshotDescription</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+                   <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
+                   <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
+                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner,
+                   long&nbsp;creationTime,
+                   int&nbsp;version)</code>
+<div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;
+<div class="block"><span class="deprecationComment">since 2.3.0 and will be removed in 4.0.0. Use
+   <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-"><code>SnapshotDescription.SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map)</code></a></span></div>
+</div>
+</td>
+</tr>
+<tr class="altColor">
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-">SnapshotDescription</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                    <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
                    <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
diff --git a/apidocs/org/apache/hadoop/hbase/client/SnapshotDescription.html b/apidocs/org/apache/hadoop/hbase/client/SnapshotDescription.html
index 9ef7b71..7a353c4 100644
--- a/apidocs/org/apache/hadoop/hbase/client/SnapshotDescription.html
+++ b/apidocs/org/apache/hadoop/hbase/client/SnapshotDescription.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Public
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.31">SnapshotDescription</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.32">SnapshotDescription</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">The POJO equivalent of HBaseProtos.SnapshotDescription</div>
 </li>
@@ -157,6 +157,19 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner)</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
+<td class="colOne"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-">SnapshotDescription</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+                   <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
+                   <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
+                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner,
+                   long&nbsp;creationTime,
+                   int&nbsp;version)</code>
+<div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;
+<div class="block"><span class="deprecationComment">since 2.3.0 and will be removed in 4.0.0. Use
+   <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-"><code>SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map)</code></a></span></div>
+</div>
+</td>
+</tr>
+<tr class="altColor">
 <td class="colOne"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-">SnapshotDescription</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                    <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
                    <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
@@ -246,7 +259,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>SnapshotDescription</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.40">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.41">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 </li>
 </ul>
 <a name="SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-">
@@ -255,7 +268,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>SnapshotDescription</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.44">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.45">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                            <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table)</pre>
 </li>
 </ul>
@@ -265,7 +278,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>SnapshotDescription</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.48">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.49">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                            <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
                            <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type)</pre>
 </li>
@@ -276,19 +289,46 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>SnapshotDescription</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.52">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.53">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                            <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
                            <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner)</pre>
 </li>
 </ul>
+<a name="SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>SnapshotDescription</h4>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
+public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.70">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+                                       <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
+                                       <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
+                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner,
+                                       long&nbsp;creationTime,
+                                       int&nbsp;version)</pre>
+<div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">since 2.3.0 and will be removed in 4.0.0. Use
+   <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-"><code>SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map)</code></a></span></div>
+<div class="block">SnapshotDescription Parameterized Constructor</div>
+<dl>
+<dt><span class="paramLabel">Parameters:</span></dt>
+<dd><code>name</code> - Name of the snapshot</dd>
+<dd><code>table</code> - TableName associated with the snapshot</dd>
+<dd><code>type</code> - Type of the snapshot - enum SnapshotType</dd>
+<dd><code>owner</code> - Snapshot Owner</dd>
+<dd><code>creationTime</code> - Creation time for Snapshot</dd>
+<dd><code>version</code> - Snapshot Version</dd>
+</dl>
+</li>
+</ul>
 <a name="SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>SnapshotDescription</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.67">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.86">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                            <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
                            <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner,
@@ -314,7 +354,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SnapshotDescription</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.90">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;snapshotName,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.109">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;snapshotName,
                            <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                            <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&gt;&nbsp;snapshotProps)</pre>
@@ -342,7 +382,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getName</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.95">getName</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.114">getName</a>()</pre>
 </li>
 </ul>
 <a name="getTableNameAsString--">
@@ -351,7 +391,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableNameAsString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.99">getTableNameAsString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.118">getTableNameAsString</a>()</pre>
 </li>
 </ul>
 <a name="getTableName--">
@@ -360,7 +400,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableName</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.103">getTableName</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.122">getTableName</a>()</pre>
 </li>
 </ul>
 <a name="getType--">
@@ -369,7 +409,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getType</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.107">getType</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.126">getType</a>()</pre>
 </li>
 </ul>
 <a name="getOwner--">
@@ -378,7 +418,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getOwner</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.111">getOwner</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.130">getOwner</a>()</pre>
 </li>
 </ul>
 <a name="getCreationTime--">
@@ -387,7 +427,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getCreationTime</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.115">getCreationTime</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.134">getCreationTime</a>()</pre>
 </li>
 </ul>
 <a name="getTtl--">
@@ -396,7 +436,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getTtl</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.120">getTtl</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.139">getTtl</a>()</pre>
 </li>
 </ul>
 <a name="getVersion--">
@@ -405,7 +445,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getVersion</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.124">getVersion</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.143">getVersion</a>()</pre>
 </li>
 </ul>
 <a name="toString--">
@@ -414,7 +454,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.129">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.148">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html b/apidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html
index 50ce678..5c8dc9a 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html
@@ -183,6 +183,19 @@ the order they are declared.</div>
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner)</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-">SnapshotDescription</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+                   <a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
+                   <a href="../../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
+                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner,
+                   long&nbsp;creationTime,
+                   int&nbsp;version)</code>
+<div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;
+<div class="block"><span class="deprecationComment">since 2.3.0 and will be removed in 4.0.0. Use
+   <a href="../../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-"><code>SnapshotDescription.SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map)</code></a></span></div>
+</div>
+</td>
+</tr>
+<tr class="altColor">
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-">SnapshotDescription</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                    <a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
                    <a href="../../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html b/apidocs/src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html
index 395dfc9..deed7fb 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html
@@ -6,7 +6,7 @@
 </head>
 <body>
 <div class="sourceContainer">
-<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a>
+<pre><span class="sourceLineNo">001</span>/*<a name="line.1"></a>
 <span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a>
 <span class="sourceLineNo">003</span> * or more contributor license agreements.  See the NOTICE file<a name="line.3"></a>
 <span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a>
@@ -27,127 +27,144 @@
 <span class="sourceLineNo">019</span><a name="line.19"></a>
 <span class="sourceLineNo">020</span>import java.util.Map;<a name="line.20"></a>
 <span class="sourceLineNo">021</span><a name="line.21"></a>
-<span class="sourceLineNo">022</span>import org.apache.hadoop.hbase.TableName;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.23"></a>
-<span class="sourceLineNo">024</span><a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.25"></a>
-<span class="sourceLineNo">026</span><a name="line.26"></a>
-<span class="sourceLineNo">027</span>/**<a name="line.27"></a>
-<span class="sourceLineNo">028</span> * The POJO equivalent of HBaseProtos.SnapshotDescription<a name="line.28"></a>
-<span class="sourceLineNo">029</span> */<a name="line.29"></a>
-<span class="sourceLineNo">030</span>@InterfaceAudience.Public<a name="line.30"></a>
-<span class="sourceLineNo">031</span>public class SnapshotDescription {<a name="line.31"></a>
-<span class="sourceLineNo">032</span>  private final String name;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>  private final TableName table;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>  private final SnapshotType snapShotType;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>  private final String owner;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>  private final long creationTime;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>  private final long ttl;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>  private final int version;<a name="line.38"></a>
-<span class="sourceLineNo">039</span><a name="line.39"></a>
-<span class="sourceLineNo">040</span>  public SnapshotDescription(String name) {<a name="line.40"></a>
-<span class="sourceLineNo">041</span>    this(name, (TableName)null);<a name="line.41"></a>
-<span class="sourceLineNo">042</span>  }<a name="line.42"></a>
-<span class="sourceLineNo">043</span><a name="line.43"></a>
-<span class="sourceLineNo">044</span>  public SnapshotDescription(String name, TableName table) {<a name="line.44"></a>
-<span class="sourceLineNo">045</span>    this(name, table, SnapshotType.DISABLED, null, -1, -1, null);<a name="line.45"></a>
-<span class="sourceLineNo">046</span>  }<a name="line.46"></a>
-<span class="sourceLineNo">047</span><a name="line.47"></a>
-<span class="sourceLineNo">048</span>  public SnapshotDescription(String name, TableName table, SnapshotType type) {<a name="line.48"></a>
-<span class="sourceLineNo">049</span>    this(name, table, type, null, -1, -1, null);<a name="line.49"></a>
-<span class="sourceLineNo">050</span>  }<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>  public SnapshotDescription(String name, TableName table, SnapshotType type, String owner) {<a name="line.52"></a>
-<span class="sourceLineNo">053</span>    this(name, table, type, owner, -1, -1, null);<a name="line.53"></a>
-<span class="sourceLineNo">054</span>  }<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  /**<a name="line.56"></a>
-<span class="sourceLineNo">057</span>   * SnapshotDescription Parameterized Constructor<a name="line.57"></a>
-<span class="sourceLineNo">058</span>   *<a name="line.58"></a>
-<span class="sourceLineNo">059</span>   * @param name          Name of the snapshot<a name="line.59"></a>
-<span class="sourceLineNo">060</span>   * @param table         TableName associated with the snapshot<a name="line.60"></a>
-<span class="sourceLineNo">061</span>   * @param type          Type of the snapshot - enum SnapshotType<a name="line.61"></a>
-<span class="sourceLineNo">062</span>   * @param owner         Snapshot Owner<a name="line.62"></a>
-<span class="sourceLineNo">063</span>   * @param creationTime  Creation time for Snapshot<a name="line.63"></a>
-<span class="sourceLineNo">064</span>   * @param version       Snapshot Version<a name="line.64"></a>
-<span class="sourceLineNo">065</span>   * @param snapshotProps Additional properties for snapshot e.g. TTL<a name="line.65"></a>
-<span class="sourceLineNo">066</span>   */<a name="line.66"></a>
-<span class="sourceLineNo">067</span>  public SnapshotDescription(String name, TableName table, SnapshotType type, String owner,<a name="line.67"></a>
-<span class="sourceLineNo">068</span>                             long creationTime, int version, Map&lt;String, Object&gt; snapshotProps) {<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    this.name = name;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    this.table = table;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>    this.snapShotType = type;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    this.owner = owner;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>    this.creationTime = creationTime;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    this.ttl = getTtlFromSnapshotProps(snapshotProps);<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    this.version = version;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>  }<a name="line.76"></a>
-<span class="sourceLineNo">077</span><a name="line.77"></a>
-<span class="sourceLineNo">078</span>  private long getTtlFromSnapshotProps(Map&lt;String, Object&gt; snapshotProps) {<a name="line.78"></a>
-<span class="sourceLineNo">079</span>    return MapUtils.getLongValue(snapshotProps, "TTL", -1);<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  }<a name="line.80"></a>
-<span class="sourceLineNo">081</span><a name="line.81"></a>
-<span class="sourceLineNo">082</span>  /**<a name="line.82"></a>
-<span class="sourceLineNo">083</span>   * SnapshotDescription Parameterized Constructor<a name="line.83"></a>
-<span class="sourceLineNo">084</span>   *<a name="line.84"></a>
-<span class="sourceLineNo">085</span>   * @param snapshotName  Name of the snapshot<a name="line.85"></a>
-<span class="sourceLineNo">086</span>   * @param tableName     TableName associated with the snapshot<a name="line.86"></a>
-<span class="sourceLineNo">087</span>   * @param type          Type of the snapshot - enum SnapshotType<a name="line.87"></a>
-<span class="sourceLineNo">088</span>   * @param snapshotProps Additional properties for snapshot e.g. TTL<a name="line.88"></a>
-<span class="sourceLineNo">089</span>   */<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  public SnapshotDescription(String snapshotName, TableName tableName, SnapshotType type,<a name="line.90"></a>
-<span class="sourceLineNo">091</span>                             Map&lt;String, Object&gt; snapshotProps) {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    this(snapshotName, tableName, type, null, -1, -1, snapshotProps);<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  }<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  public String getName() {<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    return this.name;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  }<a name="line.97"></a>
-<span class="sourceLineNo">098</span><a name="line.98"></a>
-<span class="sourceLineNo">099</span>  public String getTableNameAsString() {<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    return this.table.getNameAsString();<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  }<a name="line.101"></a>
-<span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>  public TableName getTableName() {<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    return this.table;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
-<span class="sourceLineNo">106</span><a name="line.106"></a>
-<span class="sourceLineNo">107</span>  public SnapshotType getType() {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>    return this.snapShotType;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
-<span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>  public String getOwner() {<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    return this.owner;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>  }<a name="line.113"></a>
-<span class="sourceLineNo">114</span><a name="line.114"></a>
-<span class="sourceLineNo">115</span>  public long getCreationTime() {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    return this.creationTime;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>  }<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  // get snapshot ttl in sec<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public long getTtl() {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    return ttl;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>  }<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span>  public int getVersion() {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    return this.version;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
-<span class="sourceLineNo">127</span><a name="line.127"></a>
-<span class="sourceLineNo">128</span>  @Override<a name="line.128"></a>
-<span class="sourceLineNo">129</span>  public String toString() {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    return new StringBuilder("SnapshotDescription: ")<a name="line.130"></a>
-<span class="sourceLineNo">131</span>            .append("name = ")<a name="line.131"></a>
-<span class="sourceLineNo">132</span>            .append(name)<a name="line.132"></a>
-<span class="sourceLineNo">133</span>            .append("/table = ")<a name="line.133"></a>
-<span class="sourceLineNo">134</span>            .append(table)<a name="line.134"></a>
-<span class="sourceLineNo">135</span>            .append(" /owner = ")<a name="line.135"></a>
-<span class="sourceLineNo">136</span>            .append(owner)<a name="line.136"></a>
-<span class="sourceLineNo">137</span>            .append(creationTime != -1 ? ("/creationtime = " + creationTime) : "")<a name="line.137"></a>
-<span class="sourceLineNo">138</span>            .append(ttl != -1 ? ("/ttl = " + ttl) : "")<a name="line.138"></a>
-<span class="sourceLineNo">139</span>            .append(version != -1 ? ("/version = " + version) : "")<a name="line.139"></a>
-<span class="sourceLineNo">140</span>            .toString();<a name="line.140"></a>
+<span class="sourceLineNo">022</span>import org.apache.commons.lang3.builder.ToStringBuilder;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import org.apache.hadoop.hbase.TableName;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.24"></a>
+<span class="sourceLineNo">025</span><a name="line.25"></a>
+<span class="sourceLineNo">026</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.26"></a>
+<span class="sourceLineNo">027</span><a name="line.27"></a>
+<span class="sourceLineNo">028</span>/**<a name="line.28"></a>
+<span class="sourceLineNo">029</span> * The POJO equivalent of HBaseProtos.SnapshotDescription<a name="line.29"></a>
+<span class="sourceLineNo">030</span> */<a name="line.30"></a>
+<span class="sourceLineNo">031</span>@InterfaceAudience.Public<a name="line.31"></a>
+<span class="sourceLineNo">032</span>public class SnapshotDescription {<a name="line.32"></a>
+<span class="sourceLineNo">033</span>  private final String name;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>  private final TableName table;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>  private final SnapshotType snapShotType;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>  private final String owner;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>  private final long creationTime;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>  private final long ttl;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>  private final int version;<a name="line.39"></a>
+<span class="sourceLineNo">040</span><a name="line.40"></a>
+<span class="sourceLineNo">041</span>  public SnapshotDescription(String name) {<a name="line.41"></a>
+<span class="sourceLineNo">042</span>    this(name, null);<a name="line.42"></a>
+<span class="sourceLineNo">043</span>  }<a name="line.43"></a>
+<span class="sourceLineNo">044</span><a name="line.44"></a>
+<span class="sourceLineNo">045</span>  public SnapshotDescription(String name, TableName table) {<a name="line.45"></a>
+<span class="sourceLineNo">046</span>    this(name, table, SnapshotType.DISABLED, null, -1, -1, null);<a name="line.46"></a>
+<span class="sourceLineNo">047</span>  }<a name="line.47"></a>
+<span class="sourceLineNo">048</span><a name="line.48"></a>
+<span class="sourceLineNo">049</span>  public SnapshotDescription(String name, TableName table, SnapshotType type) {<a name="line.49"></a>
+<span class="sourceLineNo">050</span>    this(name, table, type, null, -1, -1, null);<a name="line.50"></a>
+<span class="sourceLineNo">051</span>  }<a name="line.51"></a>
+<span class="sourceLineNo">052</span><a name="line.52"></a>
+<span class="sourceLineNo">053</span>  public SnapshotDescription(String name, TableName table, SnapshotType type, String owner) {<a name="line.53"></a>
+<span class="sourceLineNo">054</span>    this(name, table, type, owner, -1, -1, null);<a name="line.54"></a>
+<span class="sourceLineNo">055</span>  }<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>  /**<a name="line.57"></a>
+<span class="sourceLineNo">058</span>   * SnapshotDescription Parameterized Constructor<a name="line.58"></a>
+<span class="sourceLineNo">059</span>   *<a name="line.59"></a>
+<span class="sourceLineNo">060</span>   * @param name Name of the snapshot<a name="line.60"></a>
+<span class="sourceLineNo">061</span>   * @param table TableName associated with the snapshot<a name="line.61"></a>
+<span class="sourceLineNo">062</span>   * @param type Type of the snapshot - enum SnapshotType<a name="line.62"></a>
+<span class="sourceLineNo">063</span>   * @param owner Snapshot Owner<a name="line.63"></a>
+<span class="sourceLineNo">064</span>   * @param creationTime Creation time for Snapshot<a name="line.64"></a>
+<span class="sourceLineNo">065</span>   * @param version Snapshot Version<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   * @deprecated since 2.3.0 and will be removed in 4.0.0. Use<a name="line.66"></a>
+<span class="sourceLineNo">067</span>   *   {@link #SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map)}<a name="line.67"></a>
+<span class="sourceLineNo">068</span>   */<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  @Deprecated<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  public SnapshotDescription(String name, TableName table, SnapshotType type, String owner,<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      long creationTime, int version) {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>    this(name, table, type, owner, creationTime, version, null);<a name="line.72"></a>
+<span class="sourceLineNo">073</span>  }<a name="line.73"></a>
+<span class="sourceLineNo">074</span><a name="line.74"></a>
+<span class="sourceLineNo">075</span>  /**<a name="line.75"></a>
+<span class="sourceLineNo">076</span>   * SnapshotDescription Parameterized Constructor<a name="line.76"></a>
+<span class="sourceLineNo">077</span>   *<a name="line.77"></a>
+<span class="sourceLineNo">078</span>   * @param name          Name of the snapshot<a name="line.78"></a>
+<span class="sourceLineNo">079</span>   * @param table         TableName associated with the snapshot<a name="line.79"></a>
+<span class="sourceLineNo">080</span>   * @param type          Type of the snapshot - enum SnapshotType<a name="line.80"></a>
+<span class="sourceLineNo">081</span>   * @param owner         Snapshot Owner<a name="line.81"></a>
+<span class="sourceLineNo">082</span>   * @param creationTime  Creation time for Snapshot<a name="line.82"></a>
+<span class="sourceLineNo">083</span>   * @param version       Snapshot Version<a name="line.83"></a>
+<span class="sourceLineNo">084</span>   * @param snapshotProps Additional properties for snapshot e.g. TTL<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   */<a name="line.85"></a>
+<span class="sourceLineNo">086</span>  public SnapshotDescription(String name, TableName table, SnapshotType type, String owner,<a name="line.86"></a>
+<span class="sourceLineNo">087</span>      long creationTime, int version, Map&lt;String, Object&gt; snapshotProps) {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    this.name = name;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    this.table = table;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>    this.snapShotType = type;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    this.owner = owner;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    this.creationTime = creationTime;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    this.ttl = getTtlFromSnapshotProps(snapshotProps);<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    this.version = version;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  }<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>  private long getTtlFromSnapshotProps(Map&lt;String, Object&gt; snapshotProps) {<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    return MapUtils.getLongValue(snapshotProps, "TTL", -1);<a name="line.98"></a>
+<span class="sourceLineNo">099</span>  }<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  /**<a name="line.101"></a>
+<span class="sourceLineNo">102</span>   * SnapshotDescription Parameterized Constructor<a name="line.102"></a>
+<span class="sourceLineNo">103</span>   *<a name="line.103"></a>
+<span class="sourceLineNo">104</span>   * @param snapshotName  Name of the snapshot<a name="line.104"></a>
+<span class="sourceLineNo">105</span>   * @param tableName     TableName associated with the snapshot<a name="line.105"></a>
+<span class="sourceLineNo">106</span>   * @param type          Type of the snapshot - enum SnapshotType<a name="line.106"></a>
+<span class="sourceLineNo">107</span>   * @param snapshotProps Additional properties for snapshot e.g. TTL<a name="line.107"></a>
+<span class="sourceLineNo">108</span>   */<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  public SnapshotDescription(String snapshotName, TableName tableName, SnapshotType type,<a name="line.109"></a>
+<span class="sourceLineNo">110</span>                             Map&lt;String, Object&gt; snapshotProps) {<a name="line.110"></a>
+<span class="sourceLineNo">111</span>    this(snapshotName, tableName, type, null, -1, -1, snapshotProps);<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  }<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  public String getName() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    return this.name;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  public String getTableNameAsString() {<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    return this.table.getNameAsString();<a name="line.119"></a>
+<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public TableName getTableName() {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    return this.table;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>  }<a name="line.124"></a>
+<span class="sourceLineNo">125</span><a name="line.125"></a>
+<span class="sourceLineNo">126</span>  public SnapshotType getType() {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>    return this.snapShotType;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
+<span class="sourceLineNo">129</span><a name="line.129"></a>
+<span class="sourceLineNo">130</span>  public String getOwner() {<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    return this.owner;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>  }<a name="line.132"></a>
+<span class="sourceLineNo">133</span><a name="line.133"></a>
+<span class="sourceLineNo">134</span>  public long getCreationTime() {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    return this.creationTime;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>  }<a name="line.136"></a>
+<span class="sourceLineNo">137</span><a name="line.137"></a>
+<span class="sourceLineNo">138</span>  // get snapshot ttl in sec<a name="line.138"></a>
+<span class="sourceLineNo">139</span>  public long getTtl() {<a name="line.139"></a>
+<span class="sourceLineNo">140</span>    return ttl;<a name="line.140"></a>
 <span class="sourceLineNo">141</span>  }<a name="line.141"></a>
-<span class="sourceLineNo">142</span>}<a name="line.142"></a>
+<span class="sourceLineNo">142</span><a name="line.142"></a>
+<span class="sourceLineNo">143</span>  public int getVersion() {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    return this.version;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
+<span class="sourceLineNo">146</span><a name="line.146"></a>
+<span class="sourceLineNo">147</span>  @Override<a name="line.147"></a>
+<span class="sourceLineNo">148</span>  public String toString() {<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    return new ToStringBuilder(this)<a name="line.149"></a>
+<span class="sourceLineNo">150</span>      .append("name", name)<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      .append("table", table)<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      .append("snapShotType", snapShotType)<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      .append("owner", owner)<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      .append("creationTime", creationTime)<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      .append("ttl", ttl)<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      .append("version", version)<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      .toString();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
+<span class="sourceLineNo">159</span>}<a name="line.159"></a>
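
For context, the SnapshotDescription source published above adds an optional snapshotProps map whose "TTL" key (in seconds) is read through MapUtils.getLongValue(snapshotProps, "TTL", -1). A minimal usage sketch of the map-based convenience constructor follows; the snapshot name, table name, and TTL value are illustrative only and are not part of this commit:

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.SnapshotDescription;
import org.apache.hadoop.hbase.client.SnapshotType;

public class SnapshotDescriptionTtlExample {
  public static void main(String[] args) {
    // Optional snapshot properties; per the source above, only the "TTL" key is
    // consulted, and a missing key falls back to -1 (no expiry).
    Map<String, Object> snapshotProps = new HashMap<>();
    snapshotProps.put("TTL", 86400L); // TTL in seconds (one day)

    // Map-based constructor shown in the published source.
    SnapshotDescription desc = new SnapshotDescription(
        "demo_snapshot",                  // illustrative snapshot name
        TableName.valueOf("demo_table"),  // illustrative table name
        SnapshotType.FLUSH,
        snapshotProps);

    System.out.println(desc.getTtl()); // prints 86400
    System.out.println(desc);          // ToStringBuilder-formatted description
  }
}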
 
 
 
diff --git a/book.html b/book.html
index 868fdf5..a2a239c 100644
--- a/book.html
+++ b/book.html
@@ -45927,7 +45927,7 @@ org/apache/hadoop/hbase/security/access/AccessControlClient.revoke:(Lorg/apache/
 <div id="footer">
 <div id="footer-text">
 Version 3.0.0-SNAPSHOT<br>
-Last updated 2020-06-21 14:31:44 UTC
+Last updated 2020-06-22 14:31:55 UTC
 </div>
 </div>
 <script type="text/x-mathjax-config">
diff --git a/bulk-loads.html b/bulk-loads.html
index 0f5afc7..5a81621 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -172,7 +172,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 016d2b1..1133cca 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7448,12 +7448,12 @@
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_javadoc.html#JavadocTagContinuationIndentation">JavadocTagContinuationIndentation</a>
 <ul>
 <li>offset: <tt>&quot;2&quot;</tt></li></ul></td>
-<td>576</td>
+<td>575</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="b">
 <td></td>
 <td><a class="externalLink" href="http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription">NonEmptyAtclauseDescription</a></td>
-<td>2432</td>
+<td>2433</td>
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td></tr>
 <tr class="a">
 <td>misc</td>
@@ -11788,7 +11788,7 @@
 <tr class="a">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>javadoc</td>
-<td>JavadocTagContinuationIndentation</td>
+<td>NonEmptyAtclauseDescription</td>
 <td>Javadoc comment at column 47 has parse error. Details: no viable alternative at input '&lt;qualifier,' while parsing HTML_ELEMENT</td>
 <td>387</td></tr>
 <tr class="b">
@@ -78351,7 +78351,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/coc.html b/coc.html
index 1890b52..7700efb 100644
--- a/coc.html
+++ b/coc.html
@@ -241,7 +241,7 @@ email to <a class="externalLink" href="mailto:private@hbase.apache.org">the priv
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependencies.html b/dependencies.html
index 75dbd0b..bd13213 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -313,7 +313,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 407c2eb..6f2d69b 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -788,7 +788,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-info.html b/dependency-info.html
index 71e979a..ca1e07c 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -194,7 +194,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/dependency-management.html b/dependency-management.html
index 151b705..5eb0c03 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -1096,7 +1096,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index ecdfed9..8e18634 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -30180,27 +30180,20 @@
 <td class="colLast"><code>"hbase.mapreduce.bulkload.by.family"</code></td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><a name="org.apache.hadoop.hbase.tool.BulkLoadHFilesTool.BULK_LOAD_RENEW_TOKEN_TIME_BUFFER">
-<!--   -->
-</a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
-<td><code><a href="org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#BULK_LOAD_RENEW_TOKEN_TIME_BUFFER">BULK_LOAD_RENEW_TOKEN_TIME_BUFFER</a></code></td>
-<td class="colLast"><code>"hbase.bulkload.renew.token.time.buffer"</code></td>
-</tr>
-<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.tool.BulkLoadHFilesTool.NAME">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#NAME">NAME</a></code></td>
 <td class="colLast"><code>"completebulkload"</code></td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.tool.BulkLoadHFilesTool.TMP_DIR">
 <!--   -->
 </a><code>static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#TMP_DIR">TMP_DIR</a></code></td>
 <td class="colLast"><code>".tmp"</code></td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.tool.BulkLoadHFilesTool.VALIDATE_HFILES">
 <!--   -->
 </a><code>private&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html
index a93a438..e9e2716 100644
--- a/devapidocs/deprecated-list.html
+++ b/devapidocs/deprecated-list.html
@@ -1919,42 +1919,48 @@
 </td>
 </tr>
 <tr class="rowColor">
+<td class="colOne"><a href="org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-">org.apache.hadoop.hbase.client.SnapshotDescription(String, TableName, SnapshotType, String, long, int)</a>
+<div class="block"><span class="deprecationComment">since 2.3.0 and will be removed in 4.0.0. Use
+   <a href="org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-"><code>SnapshotDescription.SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map)</code></a></span></div>
+</td>
+</tr>
+<tr class="altColor">
 <td class="colOne"><a href="org/apache/hadoop/hbase/io/TimeRange.html#TimeRange--">org.apache.hadoop.hbase.io.TimeRange()</a>
 <div class="block"><span class="deprecationComment">This is made @InterfaceAudience.Private in the 2.0 line and above and may be
  changed to private or removed in 3.0.</span></div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="org/apache/hadoop/hbase/io/TimeRange.html#TimeRange-byte:A-">org.apache.hadoop.hbase.io.TimeRange(byte[])</a>
 <div class="block"><span class="deprecationComment">This is made @InterfaceAudience.Private in the 2.0 line and above and may be
  changed to private or removed in 3.0.</span></div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="org/apache/hadoop/hbase/io/TimeRange.html#TimeRange-byte:A-byte:A-">org.apache.hadoop.hbase.io.TimeRange(byte[], byte[])</a>
 <div class="block"><span class="deprecationComment">This is made @InterfaceAudience.Private in the 2.0 line and above and may be
  changed to private or removed in 3.0.</span></div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="org/apache/hadoop/hbase/io/TimeRange.html#TimeRange-long-">org.apache.hadoop.hbase.io.TimeRange(long)</a>
 <div class="block"><span class="deprecationComment">This is made @InterfaceAudience.Private in the 2.0 line and above and may be
  changed to private or removed in 3.0.</span></div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="org/apache/hadoop/hbase/io/TimeRange.html#TimeRange-long-long-">org.apache.hadoop.hbase.io.TimeRange(long, long)</a>
 <div class="block"><span class="deprecationComment">This is made @InterfaceAudience.Private in the 2.0 line and above and may be
  changed to private or removed in 3.0.</span></div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a href="org/apache/hadoop/hbase/wal/WALEdit.html#WALEdit-boolean-">org.apache.hadoop.hbase.wal.WALEdit(boolean)</a>
 <div class="block"><span class="deprecationComment">since 2.0.1 and will be removed in 4.0.0. Use <a href="org/apache/hadoop/hbase/wal/WALEdit.html#WALEdit-int-boolean-"><code>WALEdit.WALEdit(int, boolean)</code></a>
    instead.</span></div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a href="org/apache/hadoop/hbase/wal/WALEdit.html#WALEdit-int-">org.apache.hadoop.hbase.wal.WALEdit(int)</a>
 <div class="block"><span class="deprecationComment">since 2.0.1 and will be removed in 4.0.0. Use <a href="org/apache/hadoop/hbase/wal/WALEdit.html#WALEdit-int-boolean-"><code>WALEdit.WALEdit(int, boolean)</code></a>
    instead.</span></div>
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index e891089..75bb675 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -7898,8 +7898,6 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html#BULK_LOAD_PREFIX_BYTES">BULK_LOAD_PREFIX_BYTES</a></span> - Static variable in class org.apache.hadoop.hbase.backup.impl.<a href="org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html" title="class in org.apache.hadoop.hbase.backup.impl">BackupSystemTable</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#BULK_LOAD_RENEW_TOKEN_TIME_BUFFER">BULK_LOAD_RENEW_TOKEN_TIME_BUFFER</a></span> - Static variable in class org.apache.hadoop.hbase.tool.<a href="org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFilesTool</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.html#BULK_OUTPUT_CONF_KEY">BULK_OUTPUT_CONF_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.backup.mapreduce.<a href="org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.html" title="class in org.apache.hadoop.hbase.backup.mapreduce">MapReduceHFileSplitterJob</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/mapreduce/Import.html#BULK_OUTPUT_CONF_KEY">BULK_OUTPUT_CONF_KEY</a></span> - Static variable in class org.apache.hadoop.hbase.mapreduce.<a href="org/apache/hadoop/hbase/mapreduce/Import.html" title="class in org.apache.hadoop.hbase.mapreduce">Import</a></dt>
@@ -34771,8 +34769,6 @@
 </dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/security/token/FsDelegationToken.html#FsDelegationToken-org.apache.hadoop.hbase.security.UserProvider-java.lang.String-">FsDelegationToken(UserProvider, String)</a></span> - Constructor for class org.apache.hadoop.hbase.security.token.<a href="org/apache/hadoop/hbase/security/token/FsDelegationToken.html" title="class in org.apache.hadoop.hbase.security.token">FsDelegationToken</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/security/token/FsDelegationToken.html#FsDelegationToken-org.apache.hadoop.hbase.security.UserProvider-java.lang.String-long-">FsDelegationToken(UserProvider, String, long)</a></span> - Constructor for class org.apache.hadoop.hbase.security.token.<a href="org/apache/hadoop/hbase/security/token/FsDelegationToken.html" title="class in org.apache.hadoop.hbase.security.token">FsDelegationToken</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#fsDelegationToken">fsDelegationToken</a></span> - Variable in class org.apache.hadoop.hbase.tool.<a href="org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFilesTool</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/ReaderContext.html#fsdis">fsdis</a></span> - Variable in class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/ReaderContext.html" title="class in org.apache.hadoop.hbase.io.hfile">ReaderContext</a></dt>
@@ -97347,8 +97343,6 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/master/replication/SyncReplicationReplayWALManager.html#renameToPeerSnapshotWALDir-java.lang.String-">renameToPeerSnapshotWALDir(String)</a></span> - Method in class org.apache.hadoop.hbase.master.replication.<a href="org/apache/hadoop/hbase/master/replication/SyncReplicationReplayWALManager.html" title="class in org.apache.hadoop.hbase.master.replication">SyncReplicationReplayWALManager</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/security/token/FsDelegationToken.html#renewAheadTime">renewAheadTime</a></span> - Variable in class org.apache.hadoop.hbase.security.token.<a href="org/apache/hadoop/hbase/security/token/FsDelegationToken.html" title="class in org.apache.hadoop.hbase.security.token">FsDelegationToken</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/security/token/FsDelegationToken.html#renewer">renewer</a></span> - Variable in class org.apache.hadoop.hbase.security.token.<a href="org/apache/hadoop/hbase/security/token/FsDelegationToken.html" title="class in org.apache.hadoop.hbase.security.token">FsDelegationToken</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.html#renewLease--">renewLease()</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.html" title="class in org.apache.hadoop.hbase.client">AsyncScanSingleRegionRpcRetryingCaller</a></dt>
@@ -99989,8 +99983,6 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/AsyncConnectionImpl.html#RETRY_TIMER">RETRY_TIMER</a></span> - Static variable in class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/AsyncConnectionImpl.html" title="class in org.apache.hadoop.hbase.client">AsyncConnectionImpl</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#retryAheadTime">retryAheadTime</a></span> - Variable in class org.apache.hadoop.hbase.tool.<a href="org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFilesTool</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/RetryCounter.html#retryConfig">retryConfig</a></span> - Variable in class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/RetryCounter.html" title="class in org.apache.hadoop.hbase.util">RetryCounter</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/util/RetryCounter.RetryConfig.html#RetryConfig--">RetryConfig()</a></span> - Constructor for class org.apache.hadoop.hbase.util.<a href="org/apache/hadoop/hbase/util/RetryCounter.RetryConfig.html" title="class in org.apache.hadoop.hbase.util">RetryCounter.RetryConfig</a></dt>
@@ -112393,6 +112385,13 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-">SnapshotDescription(String, TableName, SnapshotType, String)</a></span> - Constructor for class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/SnapshotDescription.html" title="class in org.apache.hadoop.hbase.client">SnapshotDescription [...]
 <dd>&nbsp;</dd>
+<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-">SnapshotDescription(String, TableName, SnapshotType, String, long, int)</a></span> - Constructor for class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/SnapshotDescription.html" title="class in org.apache.hadoop.hbase.client" [...]
+<dd>
+<div class="block"><span class="deprecatedLabel">Deprecated.</span>
+<div class="block"><span class="deprecationComment">since 2.3.0 and will be removed in 4.0.0. Use
+   <a href="org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-"><code>SnapshotDescription.SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map)</code></a></span></div>
+</div>
+</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-">SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map&lt;String, Object&gt;)</a></span> - Constructor for class org.apache.hadoop.hbase.client.<a href="org/apache/hadoop/hbase/client/SnapshotDescription.html" title= [...]
 <dd>
 <div class="block">SnapshotDescription Parameterized Constructor</div>
@@ -120185,8 +120184,6 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/security/provider/example/ShadeSaslAuthenticationProvider.html#TOKEN_KIND">TOKEN_KIND</a></span> - Static variable in class org.apache.hadoop.hbase.security.provider.example.<a href="org/apache/hadoop/hbase/security/provider/example/ShadeSaslAuthenticationProvider.html" title="class in org.apache.hadoop.hbase.security.provider.example">ShadeSaslAuthenticationProvider</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/security/token/FsDelegationToken.html#tokenExpireTime">tokenExpireTime</a></span> - Variable in class org.apache.hadoop.hbase.security.token.<a href="org/apache/hadoop/hbase/security/token/FsDelegationToken.html" title="class in org.apache.hadoop.hbase.security.token">FsDelegationToken</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/security/SecurityInfo.html#tokenKind">tokenKind</a></span> - Variable in class org.apache.hadoop.hbase.security.<a href="org/apache/hadoop/hbase/security/SecurityInfo.html" title="class in org.apache.hadoop.hbase.security">SecurityInfo</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.html#tokenMaxLifetime">tokenMaxLifetime</a></span> - Variable in class org.apache.hadoop.hbase.security.token.<a href="org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.html" title="class in org.apache.hadoop.hbase.security.token">AuthenticationTokenSecretManager</a></dt>
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index c9a1ef8..722d02c 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -166,10 +166,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupRestoreConstants.BackupCommand</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 1b1c1a1..a53f67d 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -4263,6 +4263,19 @@ service.</div>
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner)</code>&nbsp;</td>
 </tr>
 <tr class="altColor">
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-">SnapshotDescription</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+                   <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
+                   <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
+                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner,
+                   long&nbsp;creationTime,
+                   int&nbsp;version)</code>
+<div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;
+<div class="block"><span class="deprecationComment">since 2.3.0 and will be removed in 4.0.0. Use
+   <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-"><code>SnapshotDescription.SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map)</code></a></span></div>
+</div>
+</td>
+</tr>
+<tr class="rowColor">
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-">SnapshotDescription</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                    <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
                    <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
@@ -4273,26 +4286,26 @@ service.</div>
 <div class="block">SnapshotDescription Parameterized Constructor</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.SplitTableRegionProcedureBiConsumer.html#SplitTableRegionProcedureBiConsumer-org.apache.hadoop.hbase.TableName-">SplitTableRegionProcedureBiConsumer</a></span>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/TableBuilderBase.html#TableBuilderBase-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.ConnectionConfiguration-">TableBuilderBase</a></span>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                 <a href="../../../../../org/apache/hadoop/hbase/client/ConnectionConfiguration.html" title="class in org.apache.hadoop.hbase.client">ConnectionConfiguration</a>&nbsp;connConf)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptorBuilder.html#TableDescriptorBuilder-org.apache.hadoop.hbase.TableName-">TableDescriptorBuilder</a></span>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;name)</code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TableProcedureBiConsumer.html#TableProcedureBiConsumer-org.apache.hadoop.hbase.TableName-">TableProcedureBiConsumer</a></span>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/TableState.html#TableState-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.TableState.State-">TableState</a></span>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
           <a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client">TableState.State</a>&nbsp;state)</code>
 <div class="block">Create instance of TableState.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.TruncateTableProcedureBiConsumer.html#TruncateTableProcedureBiConsumer-org.apache.hadoop.hbase.TableName-">TruncateTableProcedureBiConsumer</a></span>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)</code>&nbsp;</td>
 </tr>
 </tbody>
diff --git a/devapidocs/org/apache/hadoop/hbase/client/SnapshotDescription.html b/devapidocs/org/apache/hadoop/hbase/client/SnapshotDescription.html
index fd50c2a..8e6ee0c 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/SnapshotDescription.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/SnapshotDescription.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Public
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.31">SnapshotDescription</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.32">SnapshotDescription</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">The POJO equivalent of HBaseProtos.SnapshotDescription</div>
 </li>
@@ -200,6 +200,19 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner)</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
+<td class="colOne"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-">SnapshotDescription</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+                   <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
+                   <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
+                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner,
+                   long&nbsp;creationTime,
+                   int&nbsp;version)</code>
+<div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;
+<div class="block"><span class="deprecationComment">since 2.3.0 and will be removed in 4.0.0. Use
+   <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-"><code>SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map)</code></a></span></div>
+</div>
+</td>
+</tr>
+<tr class="altColor">
 <td class="colOne"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-">SnapshotDescription</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                    <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
                    <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
@@ -293,7 +306,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>name</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.32">name</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.33">name</a></pre>
 </li>
 </ul>
 <a name="table">
@@ -302,7 +315,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>table</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.33">table</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.34">table</a></pre>
 </li>
 </ul>
 <a name="snapShotType">
@@ -311,7 +324,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>snapShotType</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.34">snapShotType</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.35">snapShotType</a></pre>
 </li>
 </ul>
 <a name="owner">
@@ -320,7 +333,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>owner</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.35">owner</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.36">owner</a></pre>
 </li>
 </ul>
 <a name="creationTime">
@@ -329,7 +342,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>creationTime</h4>
-<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.36">creationTime</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.37">creationTime</a></pre>
 </li>
 </ul>
 <a name="ttl">
@@ -338,7 +351,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>ttl</h4>
-<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.37">ttl</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.38">ttl</a></pre>
 </li>
 </ul>
 <a name="version">
@@ -347,7 +360,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>version</h4>
-<pre>private final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.38">version</a></pre>
+<pre>private final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.39">version</a></pre>
 </li>
 </ul>
 </li>
@@ -364,7 +377,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>SnapshotDescription</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.40">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.41">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 </li>
 </ul>
 <a name="SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-">
@@ -373,7 +386,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>SnapshotDescription</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.44">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.45">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                            <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table)</pre>
 </li>
 </ul>
@@ -383,7 +396,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>SnapshotDescription</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.48">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.49">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                            <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
                            <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type)</pre>
 </li>
@@ -394,19 +407,46 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>SnapshotDescription</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.52">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.53">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                            <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
                            <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner)</pre>
 </li>
 </ul>
+<a name="SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>SnapshotDescription</h4>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
+public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.70">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+                                       <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
+                                       <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
+                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner,
+                                       long&nbsp;creationTime,
+                                       int&nbsp;version)</pre>
+<div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">since 2.3.0 and will be removed in 4.0.0. Use
+   <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-"><code>SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map)</code></a></span></div>
+<div class="block">SnapshotDescription Parameterized Constructor</div>
+<dl>
+<dt><span class="paramLabel">Parameters:</span></dt>
+<dd><code>name</code> - Name of the snapshot</dd>
+<dd><code>table</code> - TableName associated with the snapshot</dd>
+<dd><code>type</code> - Type of the snapshot - enum SnapshotType</dd>
+<dd><code>owner</code> - Snapshot Owner</dd>
+<dd><code>creationTime</code> - Creation time for Snapshot</dd>
+<dd><code>version</code> - Snapshot Version</dd>
+</dl>
+</li>
+</ul>
 <a name="SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>SnapshotDescription</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.67">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.86">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                            <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
                            <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner,
@@ -432,7 +472,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SnapshotDescription</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.90">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;snapshotName,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.109">SnapshotDescription</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;snapshotName,
                            <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                            <a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&gt;&nbsp;snapshotProps)</pre>
@@ -460,7 +500,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getTtlFromSnapshotProps</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.78">getTtlFromSnapshotProps</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/l [...]
+<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.97">getTtlFromSnapshotProps</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/l [...]
 </li>
 </ul>
 <a name="getName--">
@@ -469,7 +509,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getName</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.95">getName</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.114">getName</a>()</pre>
 </li>
 </ul>
 <a name="getTableNameAsString--">
@@ -478,7 +518,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableNameAsString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.99">getTableNameAsString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.118">getTableNameAsString</a>()</pre>
 </li>
 </ul>
 <a name="getTableName--">
@@ -487,7 +527,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getTableName</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.103">getTableName</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.122">getTableName</a>()</pre>
 </li>
 </ul>
 <a name="getType--">
@@ -496,7 +536,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getType</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.107">getType</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.126">getType</a>()</pre>
 </li>
 </ul>
 <a name="getOwner--">
@@ -505,7 +545,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getOwner</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.111">getOwner</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.130">getOwner</a>()</pre>
 </li>
 </ul>
 <a name="getCreationTime--">
@@ -514,7 +554,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getCreationTime</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.115">getCreationTime</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.134">getCreationTime</a>()</pre>
 </li>
 </ul>
 <a name="getTtl--">
@@ -523,7 +563,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getTtl</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.120">getTtl</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.139">getTtl</a>()</pre>
 </li>
 </ul>
 <a name="getVersion--">
@@ -532,7 +572,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getVersion</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.124">getVersion</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.143">getVersion</a>()</pre>
 </li>
 </ul>
 <a name="toString--">
@@ -541,7 +581,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.129">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html#line.148">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
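For context on the hunks above: the renumbered SnapshotDescription docs describe a public constructor that takes an optional snapshotProps map, from which a TTL is derived via the private getTtlFromSnapshotProps helper and exposed through getTtl(). A minimal sketch of how client code might call it follows; the "TTL" property key and its unit (seconds) are assumptions, since the generated pages shown here only record that a TTL is read from the map.

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.SnapshotDescription;
    import org.apache.hadoop.hbase.client.SnapshotType;

    public class SnapshotDescriptionSketch {
      public static void main(String[] args) {
        // Optional snapshot properties; the "TTL" key (assumed to be in seconds)
        // is what getTtlFromSnapshotProps is presumed to consult.
        Map<String, Object> snapshotProps = new HashMap<>();
        snapshotProps.put("TTL", 86400L);

        // Constructor documented in the diff above: name, table, type, props.
        SnapshotDescription desc = new SnapshotDescription(
            "mySnapshot",
            TableName.valueOf("myTable"),
            SnapshotType.FLUSH,
            snapshotProps);

        // Getters documented above.
        System.out.println(desc.getName() + " ttl=" + desc.getTtl());
      }
    }
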
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html
index 26e4780..23e857a 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html
@@ -204,6 +204,19 @@ the order they are declared.</div>
                    <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner)</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
+<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-">SnapshotDescription</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+                   <a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
+                   <a href="../../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
+                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;owner,
+                   long&nbsp;creationTime,
+                   int&nbsp;version)</code>
+<div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;
+<div class="block"><span class="deprecationComment">since 2.3.0 and will be removed in 4.0.0. Use
+   <a href="../../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-"><code>SnapshotDescription.SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map)</code></a></span></div>
+</div>
+</td>
+</tr>
+<tr class="altColor">
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/client/SnapshotDescription.html#SnapshotDescription-java.lang.String-org.apache.hadoop.hbase.TableName-org.apache.hadoop.hbase.client.SnapshotType-java.lang.String-long-int-java.util.Map-">SnapshotDescription</a></span>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                    <a href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;table,
                    <a href="../../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client">SnapshotType</a>&nbsp;type,
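The deprecation entry added above points callers of the six-argument constructor (name, table, type, owner, creationTime, version) at the seven-argument overload that additionally takes a properties map. A rough migration sketch follows; the placeholder values are illustrative only, and it is assumed here that an empty map is acceptable when no snapshot properties are needed.

    import java.util.Collections;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.SnapshotDescription;
    import org.apache.hadoop.hbase.client.SnapshotType;

    public class SnapshotDescriptionMigration {
      public static void main(String[] args) {
        String name = "mySnapshot";
        TableName table = TableName.valueOf("myTable");
        String owner = "hbase";                       // placeholder owner
        long creationTime = System.currentTimeMillis();
        int version = 2;                              // placeholder version

        // Deprecated since 2.3.0, removal planned for 4.0.0:
        //   new SnapshotDescription(name, table, SnapshotType.FLUSH, owner, creationTime, version);
        // Replacement documented above adds a snapshot-properties map as the last argument:
        SnapshotDescription desc = new SnapshotDescription(
            name, table, SnapshotType.FLUSH, owner, creationTime, version,
            Collections.emptyMap());

        System.out.println(desc);
      }
    }
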
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index de4a334..bea9c99 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -440,23 +440,23 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/LogQueryFilter.FilterByOperator.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">LogQueryFilter.FilterByOperator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/LogQueryFilter.Type.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">LogQueryFilter.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/LogQueryFilter.Type.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">LogQueryFilter.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/LogQueryFilter.FilterByOperator.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">LogQueryFilter.FilterByOperator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html b/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
index 9a728ec..6b85449 100644
--- a/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
@@ -182,8 +182,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">RegionObserver.MutationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.MetaTableOps.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">MetaTableMetrics.MetaTableOps</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">RegionObserver.MutationType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index cf91b39..d5accf7 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -191,12 +191,12 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/hbtop/field/package-tree.html b/devapidocs/org/apache/hadoop/hbase/hbtop/field/package-tree.html
index d9bf09d..cd793c2 100644
--- a/devapidocs/org/apache/hadoop/hbase/hbtop/field/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/hbtop/field/package-tree.html
@@ -92,8 +92,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.hbtop.field.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/field/FieldValueType.html" title="enum in org.apache.hadoop.hbase.hbtop.field"><span class="typeNameLink">FieldValueType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.hbtop.field.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/field/Field.html" title="enum in org.apache.hadoop.hbase.hbtop.field"><span class="typeNameLink">Field</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.hbtop.field.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/field/FieldValueType.html" title="enum in org.apache.hadoop.hbase.hbtop.field"><span class="typeNameLink">FieldValueType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html
index b8f3866..007e90f 100644
--- a/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/hbtop/terminal/package-tree.html
@@ -107,8 +107,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.hbtop.terminal.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/terminal/Color.html" title="enum in org.apache.hadoop.hbase.hbtop.terminal"><span class="typeNameLink">Color</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.hbtop.terminal.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/terminal/KeyPress.Type.html" title="enum in org.apache.hadoop.hbase.hbtop.terminal"><span class="typeNameLink">KeyPress.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.hbtop.terminal.<a href="../../../../../../org/apache/hadoop/hbase/hbtop/terminal/Color.html" title="enum in org.apache.hadoop.hbase.hbtop.terminal"><span class="typeNameLink">Color</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/http/package-tree.html b/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
index d47599b..df1e731 100644
--- a/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/http/package-tree.html
@@ -140,8 +140,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Output.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Output</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/HttpConfig.Policy.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">HttpConfig.Policy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Output.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Output</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.http.<a href="../../../../../org/apache/hadoop/hbase/http/ProfileServlet.Event.html" title="enum in org.apache.hadoop.hbase.http"><span class="typeNameLink">ProfileServlet.Event</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index ba09866..0c1a938 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -301,12 +301,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/ReaderContext.ReaderType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">ReaderContext.ReaderType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCacheFactory.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockCacheFactory.ExternalBlockCaches</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/ReaderContext.ReaderType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">ReaderContext.ReaderType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index 33d69d6..28b5056 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -356,9 +356,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.SourceStorage.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">MetricsHBaseServerSourceFactoryImpl.SourceStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallAction</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/CallEvent.Type.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">CallEvent.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallAction</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.SourceStorage.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">MetricsHBaseServerSourceFactoryImpl.SourceStorage</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index a9b83ba..b7390b1 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -296,10 +296,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/CellCounter.CellCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">CellCounter.CellCounterMapper.Counters</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
index 39196e0..4aadbb8 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
@@ -199,8 +199,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.balancer.<a href="../../../../../../org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html" title="enum in org.apache.hadoop.hbase.master.balancer"><span class="typeNameLink">BaseLoadBalancer.Cluster.LocalityType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.balancer.<a href="../../../../../../org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html" title="enum in org.apache.hadoop.hbase.master.balancer"><span class="typeNameLink">BaseLoadBalancer.Cluster.Action.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.balancer.<a href="../../../../../../org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html" title="enum in org.apache.hadoop.hbase.master.balancer"><span class="typeNameLink">BaseLoadBalancer.Cluster.LocalityType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index 48093c0..65ec948 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -355,13 +355,13 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetaRegionLocationCache.ZNodeOpType.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetaRegionLocationCache.ZNodeOpType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">RegionState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetaRegionLocationCache.ZNodeOpType.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetaRegionLocationCache.ZNodeOpType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/ServerManager.ServerLiveState.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">ServerManager.ServerLiveState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">RegionState.State</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index 732b701..283e615 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -222,10 +222,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MetaProcedureInterface.MetaOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">MetaProcedureInterface.MetaOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">TableProcedureInterface.TableOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MetaProcedureInterface.MetaOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">MetaProcedureInterface.MetaOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html b/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
index 5033f12..23db4ce 100644
--- a/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
@@ -126,8 +126,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.State.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">MonitoredTask.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/TaskMonitor.TaskFilter.TaskType.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">TaskMonitor.TaskFilter.TaskType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.State.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">MonitoredTask.State</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index 49a145a..f0ea06c 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -428,19 +428,19 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index 4aef9f8..73017c2 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -216,10 +216,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/Procedure.LockState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">Procedure.LockState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">StateMachineProcedure.Flow</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/package-tree.html
index 7b64815..2525a0e 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/package-tree.html
@@ -133,8 +133,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure2.store.wal.<a href="../../../../../../../org/apache/hadoop/hbase/procedure2/store/wal/ProcedureStoreTracker.DeleteState.html" title="enum in org.apache.hadoop.hbase.procedure2.store.wal"><span class="typeNameLink">ProcedureStoreTracker.DeleteState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.store.wal.<a href="../../../../../../../org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.PushType.html" title="enum in org.apache.hadoop.hbase.procedure2.store.wal"><span class="typeNameLink">WALProcedureStore.PushType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.store.wal.<a href="../../../../../../../org/apache/hadoop/hbase/procedure2/store/wal/ProcedureStoreTracker.DeleteState.html" title="enum in org.apache.hadoop.hbase.procedure2.store.wal"><span class="typeNameLink">ProcedureStoreTracker.DeleteState</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index 8f02efa..1446fef 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -240,12 +240,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 6b7bce3..692436e 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -731,20 +731,20 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index 7f09712..6ef0a33 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -130,9 +130,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.MatchCode.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">ScanQueryMatcher.MatchCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index 579ba79..193f1c6 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
@@ -247,10 +247,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
+<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrResult.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">ProtobufLogReader.WALHdrResult</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.Type.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">RingBufferTruck.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/CompressionContext.DictionaryIndex.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">CompressionContext.DictionaryIndex</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.RollRequestReason.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">WALActionsListener.RollRequestReason</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrResult.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">ProtobufLogReader.WALHdrResult</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html b/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
index 732dfb2..581ff15 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
@@ -166,8 +166,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.replication.<a href="../../../../../org/apache/hadoop/hbase/replication/ReplicationPeer.PeerState.html" title="enum in org.apache.hadoop.hbase.replication"><span class="typeNameLink">ReplicationPeer.PeerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.replication.<a href="../../../../../org/apache/hadoop/hbase/replication/SyncReplicationState.html" title="enum in org.apache.hadoop.hbase.replication"><span class="typeNameLink">SyncReplicationState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.replication.<a href="../../../../../org/apache/hadoop/hbase/replication/ReplicationPeer.PeerState.html" title="enum in org.apache.hadoop.hbase.replication"><span class="typeNameLink">ReplicationPeer.PeerState</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
index 63cfa9a..43b5325 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
@@ -207,8 +207,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.WorkerState.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">ReplicationSourceShipper.WorkerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceFactoryImpl.SourceHolder.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">MetricsReplicationSourceFactoryImpl.SourceHolder</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.WorkerState.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">ReplicationSourceShipper.WorkerState</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
index 49cee7a..29be652 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
@@ -162,12 +162,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.HDFSAclOperation.AclType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">SnapshotScannerHDFSAclHelper.HDFSAclOperation.AclType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessController.OpType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Action.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Action</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.HDFSAclOperation.OperationType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">SnapshotScannerHDFSAclHelper.HDFSAclOperation.OperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessController.OpType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Scope.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Scope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclHelper.HDFSAclOperation.OperationType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">SnapshotScannerHDFSAclHelper.HDFSAclOperation.OperationType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/class-use/UserProvider.html b/devapidocs/org/apache/hadoop/hbase/security/class-use/UserProvider.html
index 7824c96..8ed7464 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/class-use/UserProvider.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/class-use/UserProvider.html
@@ -426,11 +426,6 @@ service.</div>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/security/token/FsDelegationToken.html#FsDelegationToken-org.apache.hadoop.hbase.security.UserProvider-java.lang.String-">FsDelegationToken</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a>&nbsp;userProvider,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;renewer)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/security/token/FsDelegationToken.html#FsDelegationToken-org.apache.hadoop.hbase.security.UserProvider-java.lang.String-long-">FsDelegationToken</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a>&nbsp;userProvider,
-                 <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;renewer,
-                 long&nbsp;renewAheadTime)</code>&nbsp;</td>
-</tr>
 </tbody>
 </table>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
index c793825..062071e 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
@@ -189,9 +189,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">AuthMethod</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslUtil.QualityOfProtection.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslUtil.QualityOfProtection</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslUtil.QualityOfProtection.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslUtil.QualityOfProtection</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">AuthMethod</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/security/token/FsDelegationToken.html b/devapidocs/org/apache/hadoop/hbase/security/token/FsDelegationToken.html
index 0f2540f..d80c889 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/token/FsDelegationToken.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/token/FsDelegationToken.html
@@ -147,17 +147,9 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/security/token/FsDelegationToken.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>private long</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/security/token/FsDelegationToken.html#renewAheadTime">renewAheadTime</a></span></code>&nbsp;</td>
-</tr>
-<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/security/token/FsDelegationToken.html#renewer">renewer</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
-<td class="colFirst"><code>private long</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/security/token/FsDelegationToken.html#tokenExpireTime">tokenExpireTime</a></span></code>&nbsp;</td>
-</tr>
 <tr class="altColor">
 <td class="colFirst"><code>private <a href="../../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/security/token/FsDelegationToken.html#userProvider">userProvider</a></span></code>&nbsp;</td>
@@ -184,11 +176,6 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colOne"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/security/token/FsDelegationToken.html#FsDelegationToken-org.apache.hadoop.hbase.security.UserProvider-java.lang.String-">FsDelegationToken</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a>&nbsp;userProvider,
                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;renewer)</code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
-<td class="colOne"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/security/token/FsDelegationToken.html#FsDelegationToken-org.apache.hadoop.hbase.security.UserProvider-java.lang.String-long-">FsDelegationToken</a></span>(<a href="../../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a>&nbsp;userProvider,
-                 <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;renewer,
-                 long&nbsp;renewAheadTime)</code>&nbsp;</td>
-</tr>
 </table>
 </li>
 </ul>
@@ -309,30 +296,12 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <a name="fs">
 <!--   -->
 </a>
-<ul class="blockList">
+<ul class="blockListLast">
 <li class="blockList">
 <h4>fs</h4>
 <pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.52">fs</a></pre>
 </li>
 </ul>
-<a name="tokenExpireTime">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>tokenExpireTime</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.53">tokenExpireTime</a></pre>
-</li>
-</ul>
-<a name="renewAheadTime">
-<!--   -->
-</a>
-<ul class="blockListLast">
-<li class="blockList">
-<h4>renewAheadTime</h4>
-<pre>private&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.54">renewAheadTime</a></pre>
-</li>
-</ul>
 </li>
 </ul>
 <!-- ========= CONSTRUCTOR DETAIL ======== -->
@@ -344,27 +313,11 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <a name="FsDelegationToken-org.apache.hadoop.hbase.security.UserProvider-java.lang.String-">
 <!--   -->
 </a>
-<ul class="blockList">
-<li class="blockList">
-<h4>FsDelegationToken</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.59">FsDelegationToken</a>(<a href="../../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a>&nbsp;userProvider,
-                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;renewer)</pre>
-</li>
-</ul>
-<a name="FsDelegationToken-org.apache.hadoop.hbase.security.UserProvider-java.lang.String-long-">
-<!--   -->
-</a>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FsDelegationToken</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.68">FsDelegationToken</a>(<a href="../../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a>&nbsp;userProvider,
-                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;renewer,
-                         long&nbsp;renewAheadTime)</pre>
-<dl>
-<dt><span class="paramLabel">Parameters:</span></dt>
-<dd><code>renewer</code> - the account name that is allowed to renew the token.</dd>
-<dd><code>renewAheadTime</code> - how long in millis</dd>
-</dl>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.57">FsDelegationToken</a>(<a href="../../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a>&nbsp;userProvider,
+                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;renewer)</pre>
 </li>
 </ul>
 </li>
@@ -381,7 +334,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>acquireDelegationToken</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.83">acquireDelegationToken</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.70">acquireDelegationToken</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Acquire the delegation token for the specified filesystem.
  Before requesting a new delegation token, tries to find one already available.
@@ -400,7 +353,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>acquireDelegationToken</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.110">acquireDelegationToken</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;tokenKind,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.97">acquireDelegationToken</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;tokenKind,
                                    org.apache.hadoop.fs.FileSystem&nbsp;fs)
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Acquire the delegation token for the specified filesystem and token kind.
@@ -420,7 +373,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>releaseDelegationToken</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.141">releaseDelegationToken</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.121">releaseDelegationToken</a>()</pre>
 <div class="block">Releases a previously acquired delegation token.</div>
 </li>
 </ul>
@@ -430,7 +383,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getUserProvider</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.155">getUserProvider</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.135">getUserProvider</a>()</pre>
 </li>
 </ul>
 <a name="getRenewer--">
@@ -439,7 +392,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getRenewer</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.162">getRenewer</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.142">getRenewer</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the account name that is allowed to renew the token.</dd>
@@ -452,7 +405,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getUserToken</h4>
-<pre>public&nbsp;org.apache.hadoop.security.token.Token&lt;?&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.169">getUserToken</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.security.token.Token&lt;?&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.149">getUserToken</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the delegation token acquired, or null in case it was not acquired</dd>
@@ -465,7 +418,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getFileSystem</h4>
-<pre>public&nbsp;org.apache.hadoop.fs.FileSystem&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.173">getFileSystem</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.fs.FileSystem&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html#line.153">getFileSystem</a>()</pre>
 </li>
 </ul>
 </li>
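The FsDelegationToken diff above drops the (userProvider, renewer, renewAheadTime) constructor overload and the renewAheadTime/tokenExpireTime fields, leaving only FsDelegationToken(UserProvider, String) plus the acquire/release methods documented in the hunks. A minimal sketch of driving what remains of that API — the renewer name "hbase" and the UserProvider.instantiate(conf) factory call are illustrative assumptions, not taken from this commit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.security.UserProvider;
    import org.apache.hadoop.hbase.security.token.FsDelegationToken;

    public class FsDelegationTokenSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        UserProvider userProvider = UserProvider.instantiate(conf); // assumed factory
        FileSystem fs = FileSystem.get(conf);

        // Only the two-argument constructor survives this change;
        // "hbase" is an illustrative renewer principal.
        FsDelegationToken fsToken = new FsDelegationToken(userProvider, "hbase");
        try {
          // Per the javadoc above: tries to reuse a token already available
          // before requesting a new delegation token from the filesystem.
          fsToken.acquireDelegationToken(fs);
          System.out.println("token = " + fsToken.getUserToken());
        } finally {
          // "Releases a previously acquired delegation token."
          fsToken.releaseDelegationToken();
        }
      }
    }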
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
index adc046f..1cc1670 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
@@ -211,8 +211,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ImplType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ImplType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftMetrics.ThriftServerType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftMetrics.ThriftServerType</span></a></li>
 </ul>
 </li>
diff --git a/devapidocs/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html b/devapidocs/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html
index d4decc0..5e7c3e9 100644
--- a/devapidocs/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html
+++ b/devapidocs/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static interface <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.224">BulkLoadHFilesTool.BulkHFileVisitor</a>&lt;TFamily&gt;</pre>
+<pre>private static interface <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.218">BulkLoadHFilesTool.BulkHFileVisitor</a>&lt;TFamily&gt;</pre>
 </li>
 </ul>
 </div>
@@ -154,7 +154,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockList">
 <li class="blockList">
 <h4>bulkFamily</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html" title="type parameter in BulkLoadHFilesTool.BulkHFileVisitor">TFamily</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html#line.226">bulkFamily</a>(byte[]&nbsp;familyName)
+<pre><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html" title="type parameter in BulkLoadHFilesTool.BulkHFileVisitor">TFamily</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html#line.220">bulkFamily</a>(byte[]&nbsp;familyName)
             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -170,7 +170,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockListLast">
 <li class="blockList">
 <h4>bulkHFile</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html#line.228">bulkHFile</a>(<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html" title="type parameter in BulkLoadHFilesTool.BulkHFileVisitor">TFamily</a>&nbsp;family,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html#line.222">bulkHFile</a>(<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html" title="type parameter in BulkLoadHFilesTool.BulkHFileVisitor">TFamily</a>&nbsp;family,
                org.apache.hadoop.fs.FileStatus&nbsp;hfileStatus)
         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
diff --git a/devapidocs/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html b/devapidocs/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html
index 18dc66c..28f5a2d 100644
--- a/devapidocs/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html
+++ b/devapidocs/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html
@@ -180,53 +180,45 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#BULK_LOAD_RENEW_TOKEN_TIME_BUFFER">BULK_LOAD_RENEW_TOKEN_TIME_BUFFER</a></span></code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#bulkLoadByFamily">bulkLoadByFamily</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#bulkToken">bulkToken</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#clusterIds">clusterIds</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private <a href="../../../../../org/apache/hadoop/hbase/security/token/FsDelegationToken.html" title="class in org.apache.hadoop.hbase.security.token">FsDelegationToken</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#fsDelegationToken">fsDelegationToken</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#maxFilesPerRegionPerFamily">maxFilesPerRegionPerFamily</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#NAME">NAME</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private int</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#nrThreads">nrThreads</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colFirst"><code>private <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#numRetries">numRetries</a></span></code>&nbsp;</td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colFirst"><code>private boolean</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#replicate">replicate</a></span></code>&nbsp;</td>
 </tr>
-<tr class="rowColor">
-<td class="colFirst"><code>private long</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#retryAheadTime">retryAheadTime</a></span></code>&nbsp;</td>
-</tr>
 <tr class="altColor">
 <td class="colFirst"><code>(package private) static <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#TMP_DIR">TMP_DIR</a></span></code>&nbsp;</td>
@@ -644,26 +636,13 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 </dl>
 </li>
 </ul>
-<a name="BULK_LOAD_RENEW_TOKEN_TIME_BUFFER">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>BULK_LOAD_RENEW_TOKEN_TIME_BUFFER</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.129">BULK_LOAD_RENEW_TOKEN_TIME_BUFFER</a></pre>
-<dl>
-<dt><span class="seeLabel">See Also:</span></dt>
-<dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.tool.BulkLoadHFilesTool.BULK_LOAD_RENEW_TOKEN_TIME_BUFFER">Constant Field Values</a></dd>
-</dl>
-</li>
-</ul>
 <a name="TMP_DIR">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>TMP_DIR</h4>
-<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.134">TMP_DIR</a></pre>
+<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.130">TMP_DIR</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.tool.BulkLoadHFilesTool.TMP_DIR">Constant Field Values</a></dd>
@@ -676,7 +655,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>maxFilesPerRegionPerFamily</h4>
-<pre>private final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.136">maxFilesPerRegionPerFamily</a></pre>
+<pre>private final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.132">maxFilesPerRegionPerFamily</a></pre>
 </li>
 </ul>
 <a name="assignSeqIds">
@@ -685,7 +664,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>assignSeqIds</h4>
-<pre>private final&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.137">assignSeqIds</a></pre>
+<pre>private final&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.133">assignSeqIds</a></pre>
 </li>
 </ul>
 <a name="bulkLoadByFamily">
@@ -694,7 +673,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>bulkLoadByFamily</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.138">bulkLoadByFamily</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.134">bulkLoadByFamily</a></pre>
 </li>
 </ul>
 <a name="fsDelegationToken">
@@ -703,7 +682,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>fsDelegationToken</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/security/token/FsDelegationToken.html" title="class in org.apache.hadoop.hbase.security.token">FsDelegationToken</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.141">fsDelegationToken</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/security/token/FsDelegationToken.html" title="class in org.apache.hadoop.hbase.security.token">FsDelegationToken</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.137">fsDelegationToken</a></pre>
 </li>
 </ul>
 <a name="userProvider">
@@ -712,7 +691,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>userProvider</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.142">userProvider</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.138">userProvider</a></pre>
 </li>
 </ul>
 <a name="nrThreads">
@@ -721,7 +700,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>nrThreads</h4>
-<pre>private final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.143">nrThreads</a></pre>
+<pre>private final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.139">nrThreads</a></pre>
 </li>
 </ul>
 <a name="numRetries">
@@ -730,7 +709,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>numRetries</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.144">numRetries</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.140">numRetries</a></pre>
 </li>
 </ul>
 <a name="bulkToken">
@@ -739,7 +718,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>bulkToken</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.145">bulkToken</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.141">bulkToken</a></pre>
 </li>
 </ul>
 <a name="clusterIds">
@@ -748,25 +727,16 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>clusterIds</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.147">clusterIds</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.143">clusterIds</a></pre>
 </li>
 </ul>
 <a name="replicate">
 <!--   -->
 </a>
-<ul class="blockList">
-<li class="blockList">
-<h4>replicate</h4>
-<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.148">replicate</a></pre>
-</li>
-</ul>
-<a name="retryAheadTime">
-<!--   -->
-</a>
 <ul class="blockListLast">
 <li class="blockList">
-<h4>retryAheadTime</h4>
-<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.149">retryAheadTime</a></pre>
+<h4>replicate</h4>
+<pre>private&nbsp;boolean <a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.144">replicate</a></pre>
 </li>
 </ul>
 </li>
@@ -783,7 +753,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BulkLoadHFilesTool</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.151">BulkLoadHFilesTool</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.146">BulkLoadHFilesTool</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
 </li>
@@ -800,7 +770,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>createExecutorService</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true" title="class or interface in java.util.concurrent">ExecutorService</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.167">createExecutorService</a>()</pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true" title="class or interface in java.util.concurrent">ExecutorService</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.161">createExecutorService</a>()</pre>
 </li>
 </ul>
 <a name="isCreateTable--">
@@ -809,7 +779,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>isCreateTable</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.175">isCreateTable</a>()</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.169">isCreateTable</a>()</pre>
 </li>
 </ul>
 <a name="isSilence--">
@@ -818,7 +788,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>isSilence</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.179">isSilence</a>()</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.173">isSilence</a>()</pre>
 </li>
 </ul>
 <a name="isAlwaysCopyFiles--">
@@ -827,7 +797,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>isAlwaysCopyFiles</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.183">isAlwaysCopyFiles</a>()</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.177">isAlwaysCopyFiles</a>()</pre>
 </li>
 </ul>
 <a name="shouldCopyHFileMetaKey-byte:A-">
@@ -836,7 +806,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCopyHFileMetaKey</h4>
-<pre>private static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.187">shouldCopyHFileMetaKey</a>(byte[]&nbsp;key)</pre>
+<pre>private static&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.181">shouldCopyHFileMetaKey</a>(byte[]&nbsp;key)</pre>
 </li>
 </ul>
 <a name="validateFamiliesInHFiles-org.apache.hadoop.hbase.client.TableDescriptor-java.util.Deque-boolean-">
@@ -845,7 +815,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>validateFamiliesInHFiles</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.199">validateFamiliesInHFiles</a>(<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&nbsp;tableDesc,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.193">validateFamiliesInHFiles</a>(<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&nbsp;tableDesc,
                                              <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true" title="class or interface in java.util">Deque</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&nbsp;queue,
                                              boolean&nbsp;silence)
                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -862,7 +832,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>populateLoadQueue</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.220">populateLoadQueue</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true" title="class or interface in java.util">Deque</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&nbsp;ret,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.214">populateLoadQueue</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true" title="class or interface in java.util">Deque</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&nbsp;ret,
                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&gt;&nbsp;map)</pre>
 <div class="block">Populate the Queue with given HFiles</div>
 </li>
@@ -873,7 +843,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>visitBulkHFiles</h4>
-<pre>private static&nbsp;&lt;TFamily&gt;&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.236">visitBulkHFiles</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>private static&nbsp;&lt;TFamily&gt;&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.230">visitBulkHFiles</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                               org.apache.hadoop.fs.Path&nbsp;bulkDir,
                                               <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html" title="interface in org.apache.hadoop.hbase.tool">BulkLoadHFilesTool.BulkHFileVisitor</a>&lt;TFamily&gt;&nbsp;visitor,
                                               boolean&nbsp;validateHFile)
@@ -893,7 +863,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>discoverLoadQueue</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.298">discoverLoadQueue</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.292">discoverLoadQueue</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true" title="class or interface in java.util">Deque</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&nbsp;ret,
                                       org.apache.hadoop.fs.Path&nbsp;hfofDir,
                                       boolean&nbsp;validateHFile)
@@ -911,7 +881,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>prepareHFileQueue</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.329">prepareHFileQueue</a>(<a href="../../../../../org/apache/hadoop/hbase/client/AsyncClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">AsyncClusterConnection</a>&nbsp;conn,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.323">prepareHFileQueue</a>(<a href="../../../../../org/apache/hadoop/hbase/client/AsyncClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">AsyncClusterConnection</a>&nbsp;conn,
                                      <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&gt;&nbsp;map,
                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true" title="class or interface in java.util">Deque</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&nbsp;queue,
@@ -937,7 +907,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>prepareHFileQueue</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.346">prepareHFileQueue</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.340">prepareHFileQueue</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                      <a href="../../../../../org/apache/hadoop/hbase/client/AsyncClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">AsyncClusterConnection</a>&nbsp;conn,
                                      <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                                      org.apache.hadoop.fs.Path&nbsp;hfilesDir,
@@ -965,7 +935,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>loadHFileQueue</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.366">loadHFileQueue</a>(<a href="../../../../../org/apache/hadoop/hbase/client/AsyncClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">AsyncClusterConnection</a>&nbsp;conn,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.360">loadHFileQueue</a>(<a href="../../../../../org/apache/hadoop/hbase/client/AsyncClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">AsyncClusterConnection</a>&nbsp;conn,
                            <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true" title="class or interface in java.util">Deque</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&nbsp;queue,
                            boolean&nbsp;copyFiles)
@@ -993,7 +963,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>tryAtomicRegionLoad</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true" title="class or interface in java.util.concurrent">CompletableFuture</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.to [...]
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true" title="class or interface in java.util.concurrent">CompletableFuture</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.to [...]
                                                                                           <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                                                                                           boolean&nbsp;copyFiles,
                                                                                           byte[]&nbsp;first,
@@ -1020,7 +990,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>bulkLoadPhase</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.439">bulkLoadPhase</a>(<a href="../../../../../org/apache/hadoop/hbase/client/AsyncClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">AsyncClusterConnection</a>&nbsp;conn,
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.433">bulkLoadPhase</a>(<a href="../../../../../org/apache/hadoop/hbase/client/AsyncClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">AsyncClusterConnection</a>&nbsp;conn,
                              <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                              <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true" title="class or interface in java.util">Deque</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&nbsp;queue,
                              org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>,<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&nbsp;regionGroups,
@@ -1043,7 +1013,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>groupByFamilies</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&gt;& [...]
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&gt;& [...]
 </li>
 </ul>
 <a name="checkHFilesCountPerRegionPerFamily-org.apache.hbase.thirdparty.com.google.common.collect.Multimap-">
@@ -1052,7 +1022,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>checkHFilesCountPerRegionPerFamily</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.497">checkHFilesCountPerRegionPerFamily</a>(org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>,<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apac [...]
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.491">checkHFilesCountPerRegionPerFamily</a>(org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>,<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apac [...]
 </li>
 </ul>
 <a name="groupOrSplitPhase-org.apache.hadoop.hbase.client.AsyncClusterConnection-org.apache.hadoop.hbase.TableName-java.util.concurrent.ExecutorService-java.util.Deque-java.util.List-">
@@ -1061,7 +1031,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>groupOrSplitPhase</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>,<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">Bulk [...]
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>,<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">Bulk [...]
                                                                                                                                                     <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                                                                                                                                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true" title="class or interface in java.util.concurrent">ExecutorService</a>&nbsp;pool,
                                                                                                                                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true" title="class or interface in java.util">Deque</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&nbsp;queue,
@@ -1087,7 +1057,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>getUniqueName</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.572">getUniqueName</a>()</pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.566">getUniqueName</a>()</pre>
 </li>
 </ul>
 <a name="splitStoreFile-org.apache.hadoop.hbase.tool.BulkLoadHFiles.LoadQueueItem-org.apache.hadoop.hbase.client.TableDescriptor-byte:A-">
@@ -1096,7 +1066,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>splitStoreFile</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.576">splitStoreFile</a>(<a href="../../../../../org/apache/hadoop/ [...]
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.570">splitStoreFile</a>(<a href="../../../../../org/apache/hadoop/ [...]
                                                           <a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptor.html" title="interface in org.apache.hadoop.hbase.client">TableDescriptor</a>&nbsp;tableDesc,
                                                           byte[]&nbsp;splitKey)
                                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -1112,7 +1082,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>groupOrSplit</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;,<a href="https://docs.oracle.com/javas [...]
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/Pair.html" title="class in org.apache.hadoop.hbase.util">Pair</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;,<a href="https://docs.oracle.com/javas [...]
                                                                        <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                                                                        org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>,<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&nbsp;regionGroups,
                                                                        <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&nbsp;item,
@@ -1135,7 +1105,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>splitStoreFile</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.704">splitStoreFile</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.698">splitStoreFile</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                            org.apache.hadoop.fs.Path&nbsp;inFile,
                            <a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>&nbsp;familyDesc,
                            byte[]&nbsp;splitKey,
@@ -1156,7 +1126,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>copyHFileHalf</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.717">copyHFileHalf</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.711">copyHFileHalf</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                   org.apache.hadoop.fs.Path&nbsp;inFile,
                                   org.apache.hadoop.fs.Path&nbsp;outFile,
                                   <a href="../../../../../org/apache/hadoop/hbase/io/Reference.html" title="class in org.apache.hadoop.hbase.io">Reference</a>&nbsp;reference,
@@ -1175,7 +1145,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>inferBoundaries</h4>
-<pre>public static&nbsp;byte[][]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.789">inferBoundaries</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;bdryMap)</pre>
+<pre>public static&nbsp;byte[][]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.783">inferBoundaries</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true" title="class or interface in java.lang">Integer</a>&gt;&nbsp;bdryMap)</pre>
 <div class="block">Infers region boundaries for a new table.
  <p/>
  Parameter: <br/>
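
[Editor's note, not part of the generated page: since inferBoundaries is public and static, it can be called on its own, e.g. when pre-splitting a new table. A minimal sketch of the call shape follows; the boundary map is left empty here as a placeholder, because the weighting convention it must follow is described in the full javadoc, which this diff truncates.]

  import java.util.SortedMap;
  import java.util.TreeMap;
  import org.apache.hadoop.hbase.tool.BulkLoadHFilesTool;
  import org.apache.hadoop.hbase.util.Bytes;

  public class InferBoundariesExample {
    public static void main(String[] args) {
      // Keys are candidate boundary row keys; the Integer weights must follow the
      // convention documented above (not reproduced here, as the diff truncates it).
      SortedMap<byte[], Integer> bdryMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
      byte[][] splitKeys = BulkLoadHFilesTool.inferBoundaries(bdryMap);
      System.out.println("Inferred " + splitKeys.length + " split keys");
    }
  }
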
@@ -1204,7 +1174,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>createTable</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.815">createTable</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.809">createTable</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                          org.apache.hadoop.fs.Path&nbsp;hfofDir,
                          <a href="../../../../../org/apache/hadoop/hbase/client/AsyncAdmin.html" title="interface in org.apache.hadoop.hbase.client">AsyncAdmin</a>&nbsp;admin)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -1222,7 +1192,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>performBulkLoad</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt;&nbsp;<a href=".. [...]
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt;&nbsp;<a href=".. [...]
                                                                      <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                                                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true" title="class or interface in java.util">Deque</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&nbsp;queue,
                                                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true" title="class or interface in java.util.concurrent">ExecutorService</a>&nbsp;pool,
@@ -1240,7 +1210,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>cleanup</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.915">cleanup</a>(<a href="../../../../../org/apache/hadoop/hbase/client/AsyncClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">AsyncClusterConnection</a>&nbsp;conn,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.909">cleanup</a>(<a href="../../../../../org/apache/hadoop/hbase/client/AsyncClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">AsyncClusterConnection</a>&nbsp;conn,
                      <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                      <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Deque.html?is-external=true" title="class or interface in java.util">Deque</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>&gt;&nbsp;queue,
                      <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true" title="class or interface in java.util.concurrent">ExecutorService</a>&nbsp;pool)
@@ -1257,7 +1227,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>doBulkLoad</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt;&nbsp;<a href=".. [...]
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt;&nbsp;<a href=".. [...]
                                                                 <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                                                                 <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&gt;&nbsp;map,
                                                                 boolean&nbsp;silence,
@@ -1282,7 +1252,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>doBulkLoad</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt;&nbsp;<a href=".. [...]
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt;&nbsp;<a href=".. [...]
                                                                 <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName,
                                                                 org.apache.hadoop.fs.Path&nbsp;hfofDir,
                                                                 boolean&nbsp;silence,
@@ -1308,7 +1278,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>bulkLoad</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt;&nbsp;<a href="../ [...]
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt;&nbsp;<a href="../ [...]
                                                              <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&gt;&nbsp;family2Files)
                                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.html#bulkLoad-org.apache.hadoop.hbase.TableName-java.util.Map-">BulkLoadHFiles</a></code></span></div>
@@ -1331,7 +1301,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>bulkLoad</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt;&nbsp;<a href="../ [...]
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.LoadQueueItem.html" title="class in org.apache.hadoop.hbase.tool">BulkLoadHFiles.LoadQueueItem</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&gt;&nbsp;<a href="../ [...]
                                                              org.apache.hadoop.fs.Path&nbsp;dir)
                                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.html#bulkLoad-org.apache.hadoop.hbase.TableName-org.apache.hadoop.fs.Path-">BulkLoadHFiles</a></code></span></div>
@@ -1355,7 +1325,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>tableExists</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1040">tableExists</a>(<a href="../../../../../org/apache/hadoop/hbase/client/AsyncClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">AsyncClusterConnection</a>&nbsp;conn,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1034">tableExists</a>(<a href="../../../../../org/apache/hadoop/hbase/client/AsyncClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">AsyncClusterConnection</a>&nbsp;conn,
                          <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tableName)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -1371,7 +1341,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>throwAndLogTableNotFoundException</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1046">throwAndLogTableNotFoundException</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tn)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1040">throwAndLogTableNotFoundException</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tn)
                                         throws <a href="../../../../../org/apache/hadoop/hbase/TableNotFoundException.html" title="class in org.apache.hadoop.hbase">TableNotFoundException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -1385,7 +1355,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>setBulkToken</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1052">setBulkToken</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;bulkToken)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1046">setBulkToken</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;bulkToken)</pre>
 </li>
 </ul>
 <a name="setClusterIds-java.util.List-">
@@ -1394,7 +1364,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>setClusterIds</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1056">setClusterIds</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;clusterIds)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1050">setClusterIds</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;clusterIds)</pre>
 </li>
 </ul>
 <a name="usage--">
@@ -1403,7 +1373,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>usage</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1060">usage</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1054">usage</a>()</pre>
 </li>
 </ul>
 <a name="run-java.lang.String:A-">
@@ -1412,7 +1382,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1077">run</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1071">run</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -1428,7 +1398,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>main</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1104">main</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1098">main</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>[]&nbsp;args)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -1442,7 +1412,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockList">
 <li class="blockList">
 <h4>disableReplication</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1111">disableReplication</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1105">disableReplication</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.html#disableReplication--">BulkLoadHFiles</a></code></span></div>
 <div class="block">Disables replication for all bulkloads done via this instance,
  when bulkload replication is configured.</div>
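
[Editor's note, not part of the generated page: a short sketch of turning off bulk-load replication on a loader instance before loading, using only the methods documented on this page; the path and table name are placeholders.]

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.tool.BulkLoadHFilesTool;

  public class BulkLoadWithoutReplicationExample {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      BulkLoadHFilesTool loader = new BulkLoadHFilesTool(conf);
      // Only this loader instance skips bulk-load replication; other loaders are unaffected.
      loader.disableReplication();
      System.out.println("replication disabled: " + loader.isReplicationDisabled());
      loader.bulkLoad(TableName.valueOf("my_table"), new Path("hdfs:///staging/my_table"));
    }
  }
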
@@ -1458,7 +1428,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isReplicationDisabled</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1116">isReplicationDisabled</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html#line.1110">isReplicationDisabled</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.html#isReplicationDisabled--">isReplicationDisabled</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/tool/BulkLoadHFiles.html" title="interface in org.apache.hadoop.hbase.tool">BulkLoadHFiles</a></code></dd>
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index 36db810..5df72f2 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -546,15 +546,15 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLockWithObjectPool.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLockWithObjectPool.ReferenceType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/DNS.ServerType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">DNS.ServerType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HbckErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HbckErrorReporter.ERROR_CODE</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLockWithObjectPool.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLockWithObjectPool.ReferenceType</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
index b4a98b8..2dbf63c 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
@@ -195,8 +195,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true [...]
 <ul>
-<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.Providers.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALFactory.Providers</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/RegionGroupingProvider.Strategies.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">RegionGroupingProvider.Strategies</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.Providers.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALFactory.Providers</span></a></li>
 </ul>
 </li>
 </ul>
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html
index 395dfc9..deed7fb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/SnapshotDescription.html
@@ -6,7 +6,7 @@
 </head>
 <body>
 <div class="sourceContainer">
-<pre><span class="sourceLineNo">001</span>/**<a name="line.1"></a>
+<pre><span class="sourceLineNo">001</span>/*<a name="line.1"></a>
 <span class="sourceLineNo">002</span> * Licensed to the Apache Software Foundation (ASF) under one<a name="line.2"></a>
 <span class="sourceLineNo">003</span> * or more contributor license agreements.  See the NOTICE file<a name="line.3"></a>
 <span class="sourceLineNo">004</span> * distributed with this work for additional information<a name="line.4"></a>
@@ -27,127 +27,144 @@
 <span class="sourceLineNo">019</span><a name="line.19"></a>
 <span class="sourceLineNo">020</span>import java.util.Map;<a name="line.20"></a>
 <span class="sourceLineNo">021</span><a name="line.21"></a>
-<span class="sourceLineNo">022</span>import org.apache.hadoop.hbase.TableName;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.23"></a>
-<span class="sourceLineNo">024</span><a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.25"></a>
-<span class="sourceLineNo">026</span><a name="line.26"></a>
-<span class="sourceLineNo">027</span>/**<a name="line.27"></a>
-<span class="sourceLineNo">028</span> * The POJO equivalent of HBaseProtos.SnapshotDescription<a name="line.28"></a>
-<span class="sourceLineNo">029</span> */<a name="line.29"></a>
-<span class="sourceLineNo">030</span>@InterfaceAudience.Public<a name="line.30"></a>
-<span class="sourceLineNo">031</span>public class SnapshotDescription {<a name="line.31"></a>
-<span class="sourceLineNo">032</span>  private final String name;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>  private final TableName table;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>  private final SnapshotType snapShotType;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>  private final String owner;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>  private final long creationTime;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>  private final long ttl;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>  private final int version;<a name="line.38"></a>
-<span class="sourceLineNo">039</span><a name="line.39"></a>
-<span class="sourceLineNo">040</span>  public SnapshotDescription(String name) {<a name="line.40"></a>
-<span class="sourceLineNo">041</span>    this(name, (TableName)null);<a name="line.41"></a>
-<span class="sourceLineNo">042</span>  }<a name="line.42"></a>
-<span class="sourceLineNo">043</span><a name="line.43"></a>
-<span class="sourceLineNo">044</span>  public SnapshotDescription(String name, TableName table) {<a name="line.44"></a>
-<span class="sourceLineNo">045</span>    this(name, table, SnapshotType.DISABLED, null, -1, -1, null);<a name="line.45"></a>
-<span class="sourceLineNo">046</span>  }<a name="line.46"></a>
-<span class="sourceLineNo">047</span><a name="line.47"></a>
-<span class="sourceLineNo">048</span>  public SnapshotDescription(String name, TableName table, SnapshotType type) {<a name="line.48"></a>
-<span class="sourceLineNo">049</span>    this(name, table, type, null, -1, -1, null);<a name="line.49"></a>
-<span class="sourceLineNo">050</span>  }<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>  public SnapshotDescription(String name, TableName table, SnapshotType type, String owner) {<a name="line.52"></a>
-<span class="sourceLineNo">053</span>    this(name, table, type, owner, -1, -1, null);<a name="line.53"></a>
-<span class="sourceLineNo">054</span>  }<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  /**<a name="line.56"></a>
-<span class="sourceLineNo">057</span>   * SnapshotDescription Parameterized Constructor<a name="line.57"></a>
-<span class="sourceLineNo">058</span>   *<a name="line.58"></a>
-<span class="sourceLineNo">059</span>   * @param name          Name of the snapshot<a name="line.59"></a>
-<span class="sourceLineNo">060</span>   * @param table         TableName associated with the snapshot<a name="line.60"></a>
-<span class="sourceLineNo">061</span>   * @param type          Type of the snapshot - enum SnapshotType<a name="line.61"></a>
-<span class="sourceLineNo">062</span>   * @param owner         Snapshot Owner<a name="line.62"></a>
-<span class="sourceLineNo">063</span>   * @param creationTime  Creation time for Snapshot<a name="line.63"></a>
-<span class="sourceLineNo">064</span>   * @param version       Snapshot Version<a name="line.64"></a>
-<span class="sourceLineNo">065</span>   * @param snapshotProps Additional properties for snapshot e.g. TTL<a name="line.65"></a>
-<span class="sourceLineNo">066</span>   */<a name="line.66"></a>
-<span class="sourceLineNo">067</span>  public SnapshotDescription(String name, TableName table, SnapshotType type, String owner,<a name="line.67"></a>
-<span class="sourceLineNo">068</span>                             long creationTime, int version, Map&lt;String, Object&gt; snapshotProps) {<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    this.name = name;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    this.table = table;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>    this.snapShotType = type;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    this.owner = owner;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>    this.creationTime = creationTime;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    this.ttl = getTtlFromSnapshotProps(snapshotProps);<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    this.version = version;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>  }<a name="line.76"></a>
-<span class="sourceLineNo">077</span><a name="line.77"></a>
-<span class="sourceLineNo">078</span>  private long getTtlFromSnapshotProps(Map&lt;String, Object&gt; snapshotProps) {<a name="line.78"></a>
-<span class="sourceLineNo">079</span>    return MapUtils.getLongValue(snapshotProps, "TTL", -1);<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  }<a name="line.80"></a>
-<span class="sourceLineNo">081</span><a name="line.81"></a>
-<span class="sourceLineNo">082</span>  /**<a name="line.82"></a>
-<span class="sourceLineNo">083</span>   * SnapshotDescription Parameterized Constructor<a name="line.83"></a>
-<span class="sourceLineNo">084</span>   *<a name="line.84"></a>
-<span class="sourceLineNo">085</span>   * @param snapshotName  Name of the snapshot<a name="line.85"></a>
-<span class="sourceLineNo">086</span>   * @param tableName     TableName associated with the snapshot<a name="line.86"></a>
-<span class="sourceLineNo">087</span>   * @param type          Type of the snapshot - enum SnapshotType<a name="line.87"></a>
-<span class="sourceLineNo">088</span>   * @param snapshotProps Additional properties for snapshot e.g. TTL<a name="line.88"></a>
-<span class="sourceLineNo">089</span>   */<a name="line.89"></a>
-<span class="sourceLineNo">090</span>  public SnapshotDescription(String snapshotName, TableName tableName, SnapshotType type,<a name="line.90"></a>
-<span class="sourceLineNo">091</span>                             Map&lt;String, Object&gt; snapshotProps) {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    this(snapshotName, tableName, type, null, -1, -1, snapshotProps);<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  }<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  public String getName() {<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    return this.name;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  }<a name="line.97"></a>
-<span class="sourceLineNo">098</span><a name="line.98"></a>
-<span class="sourceLineNo">099</span>  public String getTableNameAsString() {<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    return this.table.getNameAsString();<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  }<a name="line.101"></a>
-<span class="sourceLineNo">102</span><a name="line.102"></a>
-<span class="sourceLineNo">103</span>  public TableName getTableName() {<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    return this.table;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
-<span class="sourceLineNo">106</span><a name="line.106"></a>
-<span class="sourceLineNo">107</span>  public SnapshotType getType() {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>    return this.snapShotType;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
-<span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>  public String getOwner() {<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    return this.owner;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>  }<a name="line.113"></a>
-<span class="sourceLineNo">114</span><a name="line.114"></a>
-<span class="sourceLineNo">115</span>  public long getCreationTime() {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    return this.creationTime;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>  }<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>  // get snapshot ttl in sec<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  public long getTtl() {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    return ttl;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>  }<a name="line.122"></a>
-<span class="sourceLineNo">123</span><a name="line.123"></a>
-<span class="sourceLineNo">124</span>  public int getVersion() {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    return this.version;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
-<span class="sourceLineNo">127</span><a name="line.127"></a>
-<span class="sourceLineNo">128</span>  @Override<a name="line.128"></a>
-<span class="sourceLineNo">129</span>  public String toString() {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    return new StringBuilder("SnapshotDescription: ")<a name="line.130"></a>
-<span class="sourceLineNo">131</span>            .append("name = ")<a name="line.131"></a>
-<span class="sourceLineNo">132</span>            .append(name)<a name="line.132"></a>
-<span class="sourceLineNo">133</span>            .append("/table = ")<a name="line.133"></a>
-<span class="sourceLineNo">134</span>            .append(table)<a name="line.134"></a>
-<span class="sourceLineNo">135</span>            .append(" /owner = ")<a name="line.135"></a>
-<span class="sourceLineNo">136</span>            .append(owner)<a name="line.136"></a>
-<span class="sourceLineNo">137</span>            .append(creationTime != -1 ? ("/creationtime = " + creationTime) : "")<a name="line.137"></a>
-<span class="sourceLineNo">138</span>            .append(ttl != -1 ? ("/ttl = " + ttl) : "")<a name="line.138"></a>
-<span class="sourceLineNo">139</span>            .append(version != -1 ? ("/version = " + version) : "")<a name="line.139"></a>
-<span class="sourceLineNo">140</span>            .toString();<a name="line.140"></a>
+<span class="sourceLineNo">022</span>import org.apache.commons.lang3.builder.ToStringBuilder;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import org.apache.hadoop.hbase.TableName;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.24"></a>
+<span class="sourceLineNo">025</span><a name="line.25"></a>
+<span class="sourceLineNo">026</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.26"></a>
+<span class="sourceLineNo">027</span><a name="line.27"></a>
+<span class="sourceLineNo">028</span>/**<a name="line.28"></a>
+<span class="sourceLineNo">029</span> * The POJO equivalent of HBaseProtos.SnapshotDescription<a name="line.29"></a>
+<span class="sourceLineNo">030</span> */<a name="line.30"></a>
+<span class="sourceLineNo">031</span>@InterfaceAudience.Public<a name="line.31"></a>
+<span class="sourceLineNo">032</span>public class SnapshotDescription {<a name="line.32"></a>
+<span class="sourceLineNo">033</span>  private final String name;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>  private final TableName table;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>  private final SnapshotType snapShotType;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>  private final String owner;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>  private final long creationTime;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>  private final long ttl;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>  private final int version;<a name="line.39"></a>
+<span class="sourceLineNo">040</span><a name="line.40"></a>
+<span class="sourceLineNo">041</span>  public SnapshotDescription(String name) {<a name="line.41"></a>
+<span class="sourceLineNo">042</span>    this(name, null);<a name="line.42"></a>
+<span class="sourceLineNo">043</span>  }<a name="line.43"></a>
+<span class="sourceLineNo">044</span><a name="line.44"></a>
+<span class="sourceLineNo">045</span>  public SnapshotDescription(String name, TableName table) {<a name="line.45"></a>
+<span class="sourceLineNo">046</span>    this(name, table, SnapshotType.DISABLED, null, -1, -1, null);<a name="line.46"></a>
+<span class="sourceLineNo">047</span>  }<a name="line.47"></a>
+<span class="sourceLineNo">048</span><a name="line.48"></a>
+<span class="sourceLineNo">049</span>  public SnapshotDescription(String name, TableName table, SnapshotType type) {<a name="line.49"></a>
+<span class="sourceLineNo">050</span>    this(name, table, type, null, -1, -1, null);<a name="line.50"></a>
+<span class="sourceLineNo">051</span>  }<a name="line.51"></a>
+<span class="sourceLineNo">052</span><a name="line.52"></a>
+<span class="sourceLineNo">053</span>  public SnapshotDescription(String name, TableName table, SnapshotType type, String owner) {<a name="line.53"></a>
+<span class="sourceLineNo">054</span>    this(name, table, type, owner, -1, -1, null);<a name="line.54"></a>
+<span class="sourceLineNo">055</span>  }<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>  /**<a name="line.57"></a>
+<span class="sourceLineNo">058</span>   * SnapshotDescription Parameterized Constructor<a name="line.58"></a>
+<span class="sourceLineNo">059</span>   *<a name="line.59"></a>
+<span class="sourceLineNo">060</span>   * @param name Name of the snapshot<a name="line.60"></a>
+<span class="sourceLineNo">061</span>   * @param table TableName associated with the snapshot<a name="line.61"></a>
+<span class="sourceLineNo">062</span>   * @param type Type of the snapshot - enum SnapshotType<a name="line.62"></a>
+<span class="sourceLineNo">063</span>   * @param owner Snapshot Owner<a name="line.63"></a>
+<span class="sourceLineNo">064</span>   * @param creationTime Creation time for Snapshot<a name="line.64"></a>
+<span class="sourceLineNo">065</span>   * @param version Snapshot Version<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   * @deprecated since 2.3.0 and will be removed in 4.0.0. Use<a name="line.66"></a>
+<span class="sourceLineNo">067</span>   *   {@link #SnapshotDescription(String, TableName, SnapshotType, String, long, int, Map)}<a name="line.67"></a>
+<span class="sourceLineNo">068</span>   */<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  @Deprecated<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  public SnapshotDescription(String name, TableName table, SnapshotType type, String owner,<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      long creationTime, int version) {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>    this(name, table, type, owner, creationTime, version, null);<a name="line.72"></a>
+<span class="sourceLineNo">073</span>  }<a name="line.73"></a>
+<span class="sourceLineNo">074</span><a name="line.74"></a>
+<span class="sourceLineNo">075</span>  /**<a name="line.75"></a>
+<span class="sourceLineNo">076</span>   * SnapshotDescription Parameterized Constructor<a name="line.76"></a>
+<span class="sourceLineNo">077</span>   *<a name="line.77"></a>
+<span class="sourceLineNo">078</span>   * @param name          Name of the snapshot<a name="line.78"></a>
+<span class="sourceLineNo">079</span>   * @param table         TableName associated with the snapshot<a name="line.79"></a>
+<span class="sourceLineNo">080</span>   * @param type          Type of the snapshot - enum SnapshotType<a name="line.80"></a>
+<span class="sourceLineNo">081</span>   * @param owner         Snapshot Owner<a name="line.81"></a>
+<span class="sourceLineNo">082</span>   * @param creationTime  Creation time for Snapshot<a name="line.82"></a>
+<span class="sourceLineNo">083</span>   * @param version       Snapshot Version<a name="line.83"></a>
+<span class="sourceLineNo">084</span>   * @param snapshotProps Additional properties for snapshot e.g. TTL<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   */<a name="line.85"></a>
+<span class="sourceLineNo">086</span>  public SnapshotDescription(String name, TableName table, SnapshotType type, String owner,<a name="line.86"></a>
+<span class="sourceLineNo">087</span>      long creationTime, int version, Map&lt;String, Object&gt; snapshotProps) {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    this.name = name;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    this.table = table;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>    this.snapShotType = type;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    this.owner = owner;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    this.creationTime = creationTime;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    this.ttl = getTtlFromSnapshotProps(snapshotProps);<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    this.version = version;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  }<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>  private long getTtlFromSnapshotProps(Map&lt;String, Object&gt; snapshotProps) {<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    return MapUtils.getLongValue(snapshotProps, "TTL", -1);<a name="line.98"></a>
+<span class="sourceLineNo">099</span>  }<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  /**<a name="line.101"></a>
+<span class="sourceLineNo">102</span>   * SnapshotDescription Parameterized Constructor<a name="line.102"></a>
+<span class="sourceLineNo">103</span>   *<a name="line.103"></a>
+<span class="sourceLineNo">104</span>   * @param snapshotName  Name of the snapshot<a name="line.104"></a>
+<span class="sourceLineNo">105</span>   * @param tableName     TableName associated with the snapshot<a name="line.105"></a>
+<span class="sourceLineNo">106</span>   * @param type          Type of the snapshot - enum SnapshotType<a name="line.106"></a>
+<span class="sourceLineNo">107</span>   * @param snapshotProps Additional properties for snapshot e.g. TTL<a name="line.107"></a>
+<span class="sourceLineNo">108</span>   */<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  public SnapshotDescription(String snapshotName, TableName tableName, SnapshotType type,<a name="line.109"></a>
+<span class="sourceLineNo">110</span>                             Map&lt;String, Object&gt; snapshotProps) {<a name="line.110"></a>
+<span class="sourceLineNo">111</span>    this(snapshotName, tableName, type, null, -1, -1, snapshotProps);<a name="line.111"></a>
+<span class="sourceLineNo">112</span>  }<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>  public String getName() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    return this.name;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  public String getTableNameAsString() {<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    return this.table.getNameAsString();<a name="line.119"></a>
+<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public TableName getTableName() {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    return this.table;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>  }<a name="line.124"></a>
+<span class="sourceLineNo">125</span><a name="line.125"></a>
+<span class="sourceLineNo">126</span>  public SnapshotType getType() {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>    return this.snapShotType;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
+<span class="sourceLineNo">129</span><a name="line.129"></a>
+<span class="sourceLineNo">130</span>  public String getOwner() {<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    return this.owner;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>  }<a name="line.132"></a>
+<span class="sourceLineNo">133</span><a name="line.133"></a>
+<span class="sourceLineNo">134</span>  public long getCreationTime() {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    return this.creationTime;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>  }<a name="line.136"></a>
+<span class="sourceLineNo">137</span><a name="line.137"></a>
+<span class="sourceLineNo">138</span>  // get snapshot ttl in sec<a name="line.138"></a>
+<span class="sourceLineNo">139</span>  public long getTtl() {<a name="line.139"></a>
+<span class="sourceLineNo">140</span>    return ttl;<a name="line.140"></a>
 <span class="sourceLineNo">141</span>  }<a name="line.141"></a>
-<span class="sourceLineNo">142</span>}<a name="line.142"></a>
+<span class="sourceLineNo">142</span><a name="line.142"></a>
+<span class="sourceLineNo">143</span>  public int getVersion() {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    return this.version;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
+<span class="sourceLineNo">146</span><a name="line.146"></a>
+<span class="sourceLineNo">147</span>  @Override<a name="line.147"></a>
+<span class="sourceLineNo">148</span>  public String toString() {<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    return new ToStringBuilder(this)<a name="line.149"></a>
+<span class="sourceLineNo">150</span>      .append("name", name)<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      .append("table", table)<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      .append("snapShotType", snapShotType)<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      .append("owner", owner)<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      .append("creationTime", creationTime)<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      .append("ttl", ttl)<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      .append("version", version)<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      .toString();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
+<span class="sourceLineNo">159</span>}<a name="line.159"></a>
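For context, a minimal usage sketch of the SnapshotDescription API shown in the hunk above: it builds a description with a TTL passed through the snapshotProps map (the new code reads the "TTL" key via MapUtils.getLongValue, defaulting to -1) and prints the ToStringBuilder-based toString(). The snapshot name, table name, and TTL value below are illustrative only and are not taken from the commit.

import java.util.Collections;
import java.util.Map;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.SnapshotDescription;
import org.apache.hadoop.hbase.client.SnapshotType;

public class SnapshotDescriptionExample {
  public static void main(String[] args) {
    // Additional snapshot properties; only the "TTL" key is consumed by the
    // constructor shown above (86400 seconds is an arbitrary example value).
    Map<String, Object> props = Collections.<String, Object>singletonMap("TTL", 86400L);
    SnapshotDescription desc = new SnapshotDescription(
        "demo_snapshot", TableName.valueOf("demo_table"), SnapshotType.FLUSH, props);
    System.out.println(desc.getTtl());  // 86400
    System.out.println(desc);           // name/table/snapShotType/owner/creationTime/ttl/version
  }
}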
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html b/devapidocs/src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html
index ca8bdb7..b506238 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/security/token/FsDelegationToken.html
@@ -58,130 +58,110 @@
 <span class="sourceLineNo">050</span>  private boolean hasForwardedToken = false;<a name="line.50"></a>
 <span class="sourceLineNo">051</span>  private Token&lt;?&gt; userToken = null;<a name="line.51"></a>
 <span class="sourceLineNo">052</span>  private FileSystem fs = null;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>  private long tokenExpireTime = -1L;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>  private long renewAheadTime = Long.MAX_VALUE;<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  /*<a name="line.56"></a>
-<span class="sourceLineNo">057</span>   * @param renewer the account name that is allowed to renew the token.<a name="line.57"></a>
-<span class="sourceLineNo">058</span>   */<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  public FsDelegationToken(final UserProvider userProvider, final String renewer) {<a name="line.59"></a>
-<span class="sourceLineNo">060</span>    this.userProvider = userProvider;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>    this.renewer = renewer;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>  }<a name="line.62"></a>
-<span class="sourceLineNo">063</span><a name="line.63"></a>
-<span class="sourceLineNo">064</span>  /**<a name="line.64"></a>
-<span class="sourceLineNo">065</span>   * @param renewer the account name that is allowed to renew the token.<a name="line.65"></a>
-<span class="sourceLineNo">066</span>   * @param renewAheadTime how long in millis<a name="line.66"></a>
-<span class="sourceLineNo">067</span>   */<a name="line.67"></a>
-<span class="sourceLineNo">068</span>  public FsDelegationToken(final UserProvider userProvider, final String renewer,<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    long renewAheadTime) {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    this.userProvider = userProvider;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>    this.renewer = renewer;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    this.renewAheadTime = renewAheadTime;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  }<a name="line.73"></a>
-<span class="sourceLineNo">074</span><a name="line.74"></a>
-<span class="sourceLineNo">075</span>  /**<a name="line.75"></a>
-<span class="sourceLineNo">076</span>   * Acquire the delegation token for the specified filesystem.<a name="line.76"></a>
-<span class="sourceLineNo">077</span>   * Before requesting a new delegation token, tries to find one already available.<a name="line.77"></a>
-<span class="sourceLineNo">078</span>   * Currently supports checking existing delegation tokens for swebhdfs, webhdfs and hdfs.<a name="line.78"></a>
-<span class="sourceLineNo">079</span>   *<a name="line.79"></a>
-<span class="sourceLineNo">080</span>   * @param fs the filesystem that requires the delegation token<a name="line.80"></a>
-<span class="sourceLineNo">081</span>   * @throws IOException on fs.getDelegationToken() failure<a name="line.81"></a>
-<span class="sourceLineNo">082</span>   */<a name="line.82"></a>
-<span class="sourceLineNo">083</span>  public void acquireDelegationToken(final FileSystem fs)<a name="line.83"></a>
-<span class="sourceLineNo">084</span>      throws IOException {<a name="line.84"></a>
-<span class="sourceLineNo">085</span>    String tokenKind;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    String scheme = fs.getUri().getScheme();<a name="line.86"></a>
-<span class="sourceLineNo">087</span>    if (SWEBHDFS_SCHEME.equalsIgnoreCase(scheme)) {<a name="line.87"></a>
-<span class="sourceLineNo">088</span>      tokenKind = SWEBHDFS_TOKEN_KIND.toString();<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    } else if (WEBHDFS_SCHEME.equalsIgnoreCase(scheme)) {<a name="line.89"></a>
-<span class="sourceLineNo">090</span>      tokenKind = WEBHDFS_TOKEN_KIND.toString();<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    } else if (HDFS_URI_SCHEME.equalsIgnoreCase(scheme)) {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>      tokenKind = HDFS_DELEGATION_KIND.toString();<a name="line.92"></a>
-<span class="sourceLineNo">093</span>    } else {<a name="line.93"></a>
-<span class="sourceLineNo">094</span>      LOG.warn("Unknown FS URI scheme: " + scheme);<a name="line.94"></a>
-<span class="sourceLineNo">095</span>      // Preserve default behavior<a name="line.95"></a>
-<span class="sourceLineNo">096</span>      tokenKind = HDFS_DELEGATION_KIND.toString();<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    }<a name="line.97"></a>
-<span class="sourceLineNo">098</span><a name="line.98"></a>
-<span class="sourceLineNo">099</span>    acquireDelegationToken(tokenKind, fs);<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
-<span class="sourceLineNo">101</span><a name="line.101"></a>
-<span class="sourceLineNo">102</span>  /**<a name="line.102"></a>
-<span class="sourceLineNo">103</span>   * Acquire the delegation token for the specified filesystem and token kind.<a name="line.103"></a>
-<span class="sourceLineNo">104</span>   * Before requesting a new delegation token, tries to find one already available.<a name="line.104"></a>
-<span class="sourceLineNo">105</span>   *<a name="line.105"></a>
-<span class="sourceLineNo">106</span>   * @param tokenKind non-null token kind to get delegation token from the {@link UserProvider}<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * @param fs the filesystem that requires the delegation token<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * @throws IOException on fs.getDelegationToken() failure<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   */<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  public void acquireDelegationToken(final String tokenKind, final FileSystem fs)<a name="line.110"></a>
-<span class="sourceLineNo">111</span>      throws IOException {<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    Objects.requireNonNull(tokenKind, "tokenKind:null");<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    if (userProvider.isHadoopSecurityEnabled()) {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      this.fs = fs;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>      userToken = userProvider.getCurrent().getToken(tokenKind, fs.getCanonicalServiceName());<a name="line.115"></a>
-<span class="sourceLineNo">116</span>      //We should acquire token when never acquired before or token is expiring or already expired<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      if (userToken == null || tokenExpireTime &lt;= 0<a name="line.117"></a>
-<span class="sourceLineNo">118</span>        || System.currentTimeMillis() &gt; tokenExpireTime - renewAheadTime) {<a name="line.118"></a>
-<span class="sourceLineNo">119</span>        hasForwardedToken = false;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>        try {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>          userToken = fs.getDelegationToken(renewer);<a name="line.121"></a>
-<span class="sourceLineNo">122</span>          //After acquired the new token,we quickly renew it to get the token expiration<a name="line.122"></a>
-<span class="sourceLineNo">123</span>          //time to confirm to renew it before expiration<a name="line.123"></a>
-<span class="sourceLineNo">124</span>          tokenExpireTime = userToken.renew(fs.getConf());<a name="line.124"></a>
-<span class="sourceLineNo">125</span>          LOG.debug("Acquired new token " + userToken + ". Expiration time: " + tokenExpireTime);<a name="line.125"></a>
-<span class="sourceLineNo">126</span>          userProvider.getCurrent().addToken(userToken);<a name="line.126"></a>
-<span class="sourceLineNo">127</span>        } catch (InterruptedException | NullPointerException e) {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>          // we need to handle NullPointerException in case HADOOP-10009 is missing<a name="line.128"></a>
-<span class="sourceLineNo">129</span>          LOG.error("Failed to get token for " + renewer, e);<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        }<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      } else {<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        hasForwardedToken = true;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>        LOG.info("Use the existing token: " + userToken);<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      }<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    }<a name="line.135"></a>
-<span class="sourceLineNo">136</span>  }<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>  /**<a name="line.138"></a>
-<span class="sourceLineNo">139</span>   * Releases a previously acquired delegation token.<a name="line.139"></a>
-<span class="sourceLineNo">140</span>   */<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  public void releaseDelegationToken() {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    if (userProvider.isHadoopSecurityEnabled()) {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      if (userToken != null &amp;&amp; !hasForwardedToken) {<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        try {<a name="line.144"></a>
-<span class="sourceLineNo">145</span>          userToken.cancel(this.fs.getConf());<a name="line.145"></a>
-<span class="sourceLineNo">146</span>        } catch (Exception e) {<a name="line.146"></a>
-<span class="sourceLineNo">147</span>          LOG.warn("Failed to cancel HDFS delegation token: " + userToken, e);<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        }<a name="line.148"></a>
-<span class="sourceLineNo">149</span>      }<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      this.userToken = null;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      this.fs = null;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    }<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>  public UserProvider getUserProvider() {<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    return userProvider;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>  }<a name="line.157"></a>
-<span class="sourceLineNo">158</span><a name="line.158"></a>
-<span class="sourceLineNo">159</span>  /**<a name="line.159"></a>
-<span class="sourceLineNo">160</span>   * @return the account name that is allowed to renew the token.<a name="line.160"></a>
-<span class="sourceLineNo">161</span>   */<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  public String getRenewer() {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    return renewer;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  }<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>  /**<a name="line.166"></a>
-<span class="sourceLineNo">167</span>   * @return the delegation token acquired, or null in case it was not acquired<a name="line.167"></a>
-<span class="sourceLineNo">168</span>   */<a name="line.168"></a>
-<span class="sourceLineNo">169</span>  public Token&lt;?&gt; getUserToken() {<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    return userToken;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
-<span class="sourceLineNo">172</span><a name="line.172"></a>
-<span class="sourceLineNo">173</span>  public FileSystem getFileSystem() {<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    return fs;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  }<a name="line.175"></a>
-<span class="sourceLineNo">176</span>}<a name="line.176"></a>
+<span class="sourceLineNo">053</span><a name="line.53"></a>
+<span class="sourceLineNo">054</span>  /*<a name="line.54"></a>
+<span class="sourceLineNo">055</span>   * @param renewer the account name that is allowed to renew the token.<a name="line.55"></a>
+<span class="sourceLineNo">056</span>   */<a name="line.56"></a>
+<span class="sourceLineNo">057</span>  public FsDelegationToken(final UserProvider userProvider, final String renewer) {<a name="line.57"></a>
+<span class="sourceLineNo">058</span>    this.userProvider = userProvider;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>    this.renewer = renewer;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>  }<a name="line.60"></a>
+<span class="sourceLineNo">061</span><a name="line.61"></a>
+<span class="sourceLineNo">062</span>  /**<a name="line.62"></a>
+<span class="sourceLineNo">063</span>   * Acquire the delegation token for the specified filesystem.<a name="line.63"></a>
+<span class="sourceLineNo">064</span>   * Before requesting a new delegation token, tries to find one already available.<a name="line.64"></a>
+<span class="sourceLineNo">065</span>   * Currently supports checking existing delegation tokens for swebhdfs, webhdfs and hdfs.<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   *<a name="line.66"></a>
+<span class="sourceLineNo">067</span>   * @param fs the filesystem that requires the delegation token<a name="line.67"></a>
+<span class="sourceLineNo">068</span>   * @throws IOException on fs.getDelegationToken() failure<a name="line.68"></a>
+<span class="sourceLineNo">069</span>   */<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  public void acquireDelegationToken(final FileSystem fs)<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      throws IOException {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>    String tokenKind;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    String scheme = fs.getUri().getScheme();<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    if (SWEBHDFS_SCHEME.equalsIgnoreCase(scheme)) {<a name="line.74"></a>
+<span class="sourceLineNo">075</span>      tokenKind = SWEBHDFS_TOKEN_KIND.toString();<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    } else if (WEBHDFS_SCHEME.equalsIgnoreCase(scheme)) {<a name="line.76"></a>
+<span class="sourceLineNo">077</span>      tokenKind = WEBHDFS_TOKEN_KIND.toString();<a name="line.77"></a>
+<span class="sourceLineNo">078</span>    } else if (HDFS_URI_SCHEME.equalsIgnoreCase(scheme)) {<a name="line.78"></a>
+<span class="sourceLineNo">079</span>      tokenKind = HDFS_DELEGATION_KIND.toString();<a name="line.79"></a>
+<span class="sourceLineNo">080</span>    } else {<a name="line.80"></a>
+<span class="sourceLineNo">081</span>      LOG.warn("Unknown FS URI scheme: " + scheme);<a name="line.81"></a>
+<span class="sourceLineNo">082</span>      // Preserve default behavior<a name="line.82"></a>
+<span class="sourceLineNo">083</span>      tokenKind = HDFS_DELEGATION_KIND.toString();<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    }<a name="line.84"></a>
+<span class="sourceLineNo">085</span><a name="line.85"></a>
+<span class="sourceLineNo">086</span>    acquireDelegationToken(tokenKind, fs);<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  }<a name="line.87"></a>
+<span class="sourceLineNo">088</span><a name="line.88"></a>
+<span class="sourceLineNo">089</span>  /**<a name="line.89"></a>
+<span class="sourceLineNo">090</span>   * Acquire the delegation token for the specified filesystem and token kind.<a name="line.90"></a>
+<span class="sourceLineNo">091</span>   * Before requesting a new delegation token, tries to find one already available.<a name="line.91"></a>
+<span class="sourceLineNo">092</span>   *<a name="line.92"></a>
+<span class="sourceLineNo">093</span>   * @param tokenKind non-null token kind to get delegation token from the {@link UserProvider}<a name="line.93"></a>
+<span class="sourceLineNo">094</span>   * @param fs the filesystem that requires the delegation token<a name="line.94"></a>
+<span class="sourceLineNo">095</span>   * @throws IOException on fs.getDelegationToken() failure<a name="line.95"></a>
+<span class="sourceLineNo">096</span>   */<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  public void acquireDelegationToken(final String tokenKind, final FileSystem fs)<a name="line.97"></a>
+<span class="sourceLineNo">098</span>      throws IOException {<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    Objects.requireNonNull(tokenKind, "tokenKind:null");<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    if (userProvider.isHadoopSecurityEnabled()) {<a name="line.100"></a>
+<span class="sourceLineNo">101</span>      this.fs = fs;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>      userToken = userProvider.getCurrent().getToken(tokenKind, fs.getCanonicalServiceName());<a name="line.102"></a>
+<span class="sourceLineNo">103</span>      if (userToken == null) {<a name="line.103"></a>
+<span class="sourceLineNo">104</span>        hasForwardedToken = false;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>        try {<a name="line.105"></a>
+<span class="sourceLineNo">106</span>          userToken = fs.getDelegationToken(renewer);<a name="line.106"></a>
+<span class="sourceLineNo">107</span>        } catch (NullPointerException npe) {<a name="line.107"></a>
+<span class="sourceLineNo">108</span>          // we need to handle NullPointerException in case HADOOP-10009 is missing<a name="line.108"></a>
+<span class="sourceLineNo">109</span>          LOG.error("Failed to get token for " + renewer);<a name="line.109"></a>
+<span class="sourceLineNo">110</span>        }<a name="line.110"></a>
+<span class="sourceLineNo">111</span>      } else {<a name="line.111"></a>
+<span class="sourceLineNo">112</span>        hasForwardedToken = true;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>        LOG.info("Use the existing token: " + userToken);<a name="line.113"></a>
+<span class="sourceLineNo">114</span>      }<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    }<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  /**<a name="line.118"></a>
+<span class="sourceLineNo">119</span>   * Releases a previously acquired delegation token.<a name="line.119"></a>
+<span class="sourceLineNo">120</span>   */<a name="line.120"></a>
+<span class="sourceLineNo">121</span>  public void releaseDelegationToken() {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    if (userProvider.isHadoopSecurityEnabled()) {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>      if (userToken != null &amp;&amp; !hasForwardedToken) {<a name="line.123"></a>
+<span class="sourceLineNo">124</span>        try {<a name="line.124"></a>
+<span class="sourceLineNo">125</span>          userToken.cancel(this.fs.getConf());<a name="line.125"></a>
+<span class="sourceLineNo">126</span>        } catch (Exception e) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>          LOG.warn("Failed to cancel HDFS delegation token: " + userToken, e);<a name="line.127"></a>
+<span class="sourceLineNo">128</span>        }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      }<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      this.userToken = null;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>      this.fs = null;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    }<a name="line.132"></a>
+<span class="sourceLineNo">133</span>  }<a name="line.133"></a>
+<span class="sourceLineNo">134</span><a name="line.134"></a>
+<span class="sourceLineNo">135</span>  public UserProvider getUserProvider() {<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    return userProvider;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>  /**<a name="line.139"></a>
+<span class="sourceLineNo">140</span>   * @return the account name that is allowed to renew the token.<a name="line.140"></a>
+<span class="sourceLineNo">141</span>   */<a name="line.141"></a>
+<span class="sourceLineNo">142</span>  public String getRenewer() {<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    return renewer;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>  }<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span>  /**<a name="line.146"></a>
+<span class="sourceLineNo">147</span>   * @return the delegation token acquired, or null in case it was not acquired<a name="line.147"></a>
+<span class="sourceLineNo">148</span>   */<a name="line.148"></a>
+<span class="sourceLineNo">149</span>  public Token&lt;?&gt; getUserToken() {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    return userToken;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
+<span class="sourceLineNo">152</span><a name="line.152"></a>
+<span class="sourceLineNo">153</span>  public FileSystem getFileSystem() {<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    return fs;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  }<a name="line.155"></a>
+<span class="sourceLineNo">156</span>}<a name="line.156"></a>
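For context, a hedged usage sketch of FsDelegationToken as it stands after this change (only the (UserProvider, String) constructor and the acquire/release pair remain). This is illustrative and not part of the commit; the renewer name "renewer" and the generic Configuration/FileSystem setup are assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.token.FsDelegationToken;

public class FsDelegationTokenExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    UserProvider userProvider = UserProvider.instantiate(conf);
    FsDelegationToken fsToken = new FsDelegationToken(userProvider, "renewer");
    FileSystem fs = FileSystem.get(conf);
    try {
      // No-op unless Hadoop security is enabled; otherwise reuses an existing
      // token for the filesystem's scheme or requests a new one from the fs.
      fsToken.acquireDelegationToken(fs);
      System.out.println("Acquired token: " + fsToken.getUserToken());
    } finally {
      // Cancels the token only if it was newly acquired (not a forwarded one).
      fsToken.releaseDelegationToken();
    }
  }
}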
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html
index 494125b..32993ae 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.BulkHFileVisitor.html
@@ -133,998 +133,992 @@
 <span class="sourceLineNo">125</span>   */<a name="line.125"></a>
 <span class="sourceLineNo">126</span>  public static final String BULK_LOAD_HFILES_BY_FAMILY = "hbase.mapreduce.bulkload.by.family";<a name="line.126"></a>
 <span class="sourceLineNo">127</span><a name="line.127"></a>
-<span class="sourceLineNo">128</span>  //HDFS DelegationToken is cached and should be renewed before token expiration<a name="line.128"></a>
-<span class="sourceLineNo">129</span>  public static final String BULK_LOAD_RENEW_TOKEN_TIME_BUFFER<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    = "hbase.bulkload.renew.token.time.buffer";<a name="line.130"></a>
+<span class="sourceLineNo">128</span>  // We use a '.' prefix which is ignored when walking directory trees<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  // above. It is invalid family name.<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  static final String TMP_DIR = ".tmp";<a name="line.130"></a>
 <span class="sourceLineNo">131</span><a name="line.131"></a>
-<span class="sourceLineNo">132</span>  // We use a '.' prefix which is ignored when walking directory trees<a name="line.132"></a>
-<span class="sourceLineNo">133</span>  // above. It is invalid family name.<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  static final String TMP_DIR = ".tmp";<a name="line.134"></a>
+<span class="sourceLineNo">132</span>  private final int maxFilesPerRegionPerFamily;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>  private final boolean assignSeqIds;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  private boolean bulkLoadByFamily;<a name="line.134"></a>
 <span class="sourceLineNo">135</span><a name="line.135"></a>
-<span class="sourceLineNo">136</span>  private final int maxFilesPerRegionPerFamily;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>  private final boolean assignSeqIds;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  private boolean bulkLoadByFamily;<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>  // Source delegation token<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  private final FsDelegationToken fsDelegationToken;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>  private final UserProvider userProvider;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>  private final int nrThreads;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  private final AtomicInteger numRetries = new AtomicInteger(0);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  private String bulkToken;<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  private List&lt;String&gt; clusterIds = new ArrayList&lt;&gt;();<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  private boolean replicate = true;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>  private final long retryAheadTime;<a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>  public BulkLoadHFilesTool(Configuration conf) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    // make a copy, just to be sure we're not overriding someone else's config<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    super(new Configuration(conf));<a name="line.153"></a>
-<span class="sourceLineNo">154</span>    // disable blockcache for tool invocation, see HBASE-10500<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    userProvider = UserProvider.instantiate(conf);<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    retryAheadTime = conf.getLong(BULK_LOAD_RENEW_TOKEN_TIME_BUFFER, 60000L);<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    fsDelegationToken = new FsDelegationToken(userProvider, "renewer", retryAheadTime);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>    assignSeqIds = conf.getBoolean(ASSIGN_SEQ_IDS, true);<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    maxFilesPerRegionPerFamily = conf.getInt(MAX_FILES_PER_REGION_PER_FAMILY, 32);<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    nrThreads = conf.getInt("hbase.loadincremental.threads.max",<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      Runtime.getRuntime().availableProcessors());<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    bulkLoadByFamily = conf.getBoolean(BULK_LOAD_HFILES_BY_FAMILY, false);<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  }<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>  // Initialize a thread pool<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  private ExecutorService createExecutorService() {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    ThreadPoolExecutor pool = new ThreadPoolExecutor(nrThreads, nrThreads, 60, TimeUnit.SECONDS,<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      new LinkedBlockingQueue&lt;&gt;(),<a name="line.169"></a>
-<span class="sourceLineNo">170</span>      new ThreadFactoryBuilder().setNameFormat("BulkLoadHFilesTool-%1$d").setDaemon(true).build());<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    pool.allowCoreThreadTimeOut(true);<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    return pool;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>  private boolean isCreateTable() {<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    return "yes".equalsIgnoreCase(getConf().get(CREATE_TABLE_CONF_KEY, "yes"));<a name="line.176"></a>
-<span class="sourceLineNo">177</span>  }<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  private boolean isSilence() {<a name="line.179"></a>
-<span class="sourceLineNo">180</span>    return "yes".equalsIgnoreCase(getConf().get(IGNORE_UNMATCHED_CF_CONF_KEY, ""));<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  }<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>  private boolean isAlwaysCopyFiles() {<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return getConf().getBoolean(ALWAYS_COPY_FILES, false);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  }<a name="line.185"></a>
+<span class="sourceLineNo">136</span>  // Source delegation token<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  private final FsDelegationToken fsDelegationToken;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>  private final UserProvider userProvider;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>  private final int nrThreads;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  private final AtomicInteger numRetries = new AtomicInteger(0);<a name="line.140"></a>
+<span class="sourceLineNo">141</span>  private String bulkToken;<a name="line.141"></a>
+<span class="sourceLineNo">142</span><a name="line.142"></a>
+<span class="sourceLineNo">143</span>  private List&lt;String&gt; clusterIds = new ArrayList&lt;&gt;();<a name="line.143"></a>
+<span class="sourceLineNo">144</span>  private boolean replicate = true;<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span>  public BulkLoadHFilesTool(Configuration conf) {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    // make a copy, just to be sure we're not overriding someone else's config<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    super(new Configuration(conf));<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    // disable blockcache for tool invocation, see HBASE-10500<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0);<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    userProvider = UserProvider.instantiate(conf);<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    fsDelegationToken = new FsDelegationToken(userProvider, "renewer");<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    assignSeqIds = conf.getBoolean(ASSIGN_SEQ_IDS, true);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    maxFilesPerRegionPerFamily = conf.getInt(MAX_FILES_PER_REGION_PER_FAMILY, 32);<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    nrThreads = conf.getInt("hbase.loadincremental.threads.max",<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      Runtime.getRuntime().availableProcessors());<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    bulkLoadByFamily = conf.getBoolean(BULK_LOAD_HFILES_BY_FAMILY, false);<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
+<span class="sourceLineNo">159</span><a name="line.159"></a>
+<span class="sourceLineNo">160</span>  // Initialize a thread pool<a name="line.160"></a>
+<span class="sourceLineNo">161</span>  private ExecutorService createExecutorService() {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    ThreadPoolExecutor pool = new ThreadPoolExecutor(nrThreads, nrThreads, 60, TimeUnit.SECONDS,<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      new LinkedBlockingQueue&lt;&gt;(),<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      new ThreadFactoryBuilder().setNameFormat("BulkLoadHFilesTool-%1$d").setDaemon(true).build());<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    pool.allowCoreThreadTimeOut(true);<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    return pool;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>  }<a name="line.167"></a>
+<span class="sourceLineNo">168</span><a name="line.168"></a>
+<span class="sourceLineNo">169</span>  private boolean isCreateTable() {<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    return "yes".equalsIgnoreCase(getConf().get(CREATE_TABLE_CONF_KEY, "yes"));<a name="line.170"></a>
+<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
+<span class="sourceLineNo">172</span><a name="line.172"></a>
+<span class="sourceLineNo">173</span>  private boolean isSilence() {<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    return "yes".equalsIgnoreCase(getConf().get(IGNORE_UNMATCHED_CF_CONF_KEY, ""));<a name="line.174"></a>
+<span class="sourceLineNo">175</span>  }<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>  private boolean isAlwaysCopyFiles() {<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    return getConf().getBoolean(ALWAYS_COPY_FILES, false);<a name="line.178"></a>
+<span class="sourceLineNo">179</span>  }<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>  private static boolean shouldCopyHFileMetaKey(byte[] key) {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    // skip encoding to keep hfile meta consistent with data block info, see HBASE-15085<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    if (Bytes.equals(key, HFileDataBlockEncoder.DATA_BLOCK_ENCODING)) {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>      return false;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    }<a name="line.185"></a>
 <span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  private static boolean shouldCopyHFileMetaKey(byte[] key) {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    // skip encoding to keep hfile meta consistent with data block info, see HBASE-15085<a name="line.188"></a>
-<span class="sourceLineNo">189</span>    if (Bytes.equals(key, HFileDataBlockEncoder.DATA_BLOCK_ENCODING)) {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>      return false;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>    }<a name="line.191"></a>
-<span class="sourceLineNo">192</span><a name="line.192"></a>
-<span class="sourceLineNo">193</span>    return !HFileInfo.isReservedFileInfoKey(key);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>  }<a name="line.194"></a>
-<span class="sourceLineNo">195</span><a name="line.195"></a>
-<span class="sourceLineNo">196</span>  /**<a name="line.196"></a>
-<span class="sourceLineNo">197</span>   * Checks whether there is any invalid family name in HFiles to be bulk loaded.<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   */<a name="line.198"></a>
-<span class="sourceLineNo">199</span>  private static void validateFamiliesInHFiles(TableDescriptor tableDesc,<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      Deque&lt;LoadQueueItem&gt; queue, boolean silence) throws IOException {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    Set&lt;String&gt; familyNames = Arrays.stream(tableDesc.getColumnFamilies())<a name="line.201"></a>
-<span class="sourceLineNo">202</span>      .map(ColumnFamilyDescriptor::getNameAsString).collect(Collectors.toSet());<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    List&lt;String&gt; unmatchedFamilies = queue.stream().map(item -&gt; Bytes.toString(item.getFamily()))<a name="line.203"></a>
-<span class="sourceLineNo">204</span>      .filter(fn -&gt; !familyNames.contains(fn)).distinct().collect(Collectors.toList());<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    if (unmatchedFamilies.size() &gt; 0) {<a name="line.205"></a>
-<span class="sourceLineNo">206</span>      String msg =<a name="line.206"></a>
-<span class="sourceLineNo">207</span>        "Unmatched family names found: unmatched family names in HFiles to be bulkloaded: " +<a name="line.207"></a>
-<span class="sourceLineNo">208</span>          unmatchedFamilies + "; valid family names of table " + tableDesc.getTableName() +<a name="line.208"></a>
-<span class="sourceLineNo">209</span>          " are: " + familyNames;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      LOG.error(msg);<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (!silence) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        throw new IOException(msg);<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>  }<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>  /**<a name="line.217"></a>
-<span class="sourceLineNo">218</span>   * Populate the Queue with given HFiles<a name="line.218"></a>
-<span class="sourceLineNo">219</span>   */<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static void populateLoadQueue(Deque&lt;LoadQueueItem&gt; ret, Map&lt;byte[], List&lt;Path&gt;&gt; map) {<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    map.forEach((k, v) -&gt; v.stream().map(p -&gt; new LoadQueueItem(k, p)).forEachOrdered(ret::add));<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  }<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  private interface BulkHFileVisitor&lt;TFamily&gt; {<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>    TFamily bulkFamily(byte[] familyName) throws IOException;<a name="line.226"></a>
-<span class="sourceLineNo">227</span><a name="line.227"></a>
-<span class="sourceLineNo">228</span>    void bulkHFile(TFamily family, FileStatus hfileStatus) throws IOException;<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  }<a name="line.229"></a>
-<span class="sourceLineNo">230</span><a name="line.230"></a>
-<span class="sourceLineNo">231</span>  /**<a name="line.231"></a>
-<span class="sourceLineNo">232</span>   * Iterate over the bulkDir hfiles. Skip reference, HFileLink, files starting with "_". Check and<a name="line.232"></a>
-<span class="sourceLineNo">233</span>   * skip non-valid hfiles by default, or skip this validation by setting {@link #VALIDATE_HFILES}<a name="line.233"></a>
-<span class="sourceLineNo">234</span>   * to false.<a name="line.234"></a>
-<span class="sourceLineNo">235</span>   */<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  private static &lt;TFamily&gt; void visitBulkHFiles(FileSystem fs, Path bulkDir,<a name="line.236"></a>
-<span class="sourceLineNo">237</span>      BulkHFileVisitor&lt;TFamily&gt; visitor, boolean validateHFile) throws IOException {<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    FileStatus[] familyDirStatuses = fs.listStatus(bulkDir);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    for (FileStatus familyStat : familyDirStatuses) {<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      if (!familyStat.isDirectory()) {<a name="line.240"></a>
-<span class="sourceLineNo">241</span>        LOG.warn("Skipping non-directory " + familyStat.getPath());<a name="line.241"></a>
-<span class="sourceLineNo">242</span>        continue;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      }<a name="line.243"></a>
-<span class="sourceLineNo">244</span>      Path familyDir = familyStat.getPath();<a name="line.244"></a>
-<span class="sourceLineNo">245</span>      byte[] familyName = Bytes.toBytes(familyDir.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>      // Skip invalid family<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      try {<a name="line.247"></a>
-<span class="sourceLineNo">248</span>        ColumnFamilyDescriptorBuilder.isLegalColumnFamilyName(familyName);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>      } catch (IllegalArgumentException e) {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>        LOG.warn("Skipping invalid " + familyStat.getPath());<a name="line.250"></a>
-<span class="sourceLineNo">251</span>        continue;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      }<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      TFamily family = visitor.bulkFamily(familyName);<a name="line.253"></a>
-<span class="sourceLineNo">254</span><a name="line.254"></a>
-<span class="sourceLineNo">255</span>      FileStatus[] hfileStatuses = fs.listStatus(familyDir);<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      for (FileStatus hfileStatus : hfileStatuses) {<a name="line.256"></a>
-<span class="sourceLineNo">257</span>        if (!fs.isFile(hfileStatus.getPath())) {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>          LOG.warn("Skipping non-file " + hfileStatus);<a name="line.258"></a>
-<span class="sourceLineNo">259</span>          continue;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        }<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>        Path hfile = hfileStatus.getPath();<a name="line.262"></a>
-<span class="sourceLineNo">263</span>        // Skip "_", reference, HFileLink<a name="line.263"></a>
-<span class="sourceLineNo">264</span>        String fileName = hfile.getName();<a name="line.264"></a>
-<span class="sourceLineNo">265</span>        if (fileName.startsWith("_")) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>          continue;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        }<a name="line.267"></a>
-<span class="sourceLineNo">268</span>        if (StoreFileInfo.isReference(fileName)) {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>          LOG.warn("Skipping reference " + fileName);<a name="line.269"></a>
-<span class="sourceLineNo">270</span>          continue;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        }<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        if (HFileLink.isHFileLink(fileName)) {<a name="line.272"></a>
-<span class="sourceLineNo">273</span>          LOG.warn("Skipping HFileLink " + fileName);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>          continue;<a name="line.274"></a>
-<span class="sourceLineNo">275</span>        }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>        // Validate HFile Format if needed<a name="line.277"></a>
-<span class="sourceLineNo">278</span>        if (validateHFile) {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>          try {<a name="line.279"></a>
-<span class="sourceLineNo">280</span>            if (!HFile.isHFileFormat(fs, hfile)) {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>              LOG.warn("the file " + hfile + " doesn't seems to be an hfile. skipping");<a name="line.281"></a>
-<span class="sourceLineNo">282</span>              continue;<a name="line.282"></a>
-<span class="sourceLineNo">283</span>            }<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          } catch (FileNotFoundException e) {<a name="line.284"></a>
-<span class="sourceLineNo">285</span>            LOG.warn("the file " + hfile + " was removed");<a name="line.285"></a>
-<span class="sourceLineNo">286</span>            continue;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          }<a name="line.287"></a>
-<span class="sourceLineNo">288</span>        }<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>        visitor.bulkHFile(family, hfileStatus);<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      }<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    }<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  }<a name="line.293"></a>
-<span class="sourceLineNo">294</span><a name="line.294"></a>
-<span class="sourceLineNo">295</span>  /**<a name="line.295"></a>
-<span class="sourceLineNo">296</span>   * Walk the given directory for all HFiles, and return a Queue containing all such files.<a name="line.296"></a>
-<span class="sourceLineNo">297</span>   */<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private static void discoverLoadQueue(Configuration conf, Deque&lt;LoadQueueItem&gt; ret, Path hfofDir,<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      boolean validateHFile) throws IOException {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    visitBulkHFiles(hfofDir.getFileSystem(conf), hfofDir, new BulkHFileVisitor&lt;byte[]&gt;() {<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      @Override<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      public byte[] bulkFamily(final byte[] familyName) {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>        return familyName;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      }<a name="line.304"></a>
-<span class="sourceLineNo">305</span><a name="line.305"></a>
-<span class="sourceLineNo">306</span>      @Override<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      public void bulkHFile(final byte[] family, final FileStatus hfile) {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>        long length = hfile.getLen();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        if (length &gt; conf.getLong(HConstants.HREGION_MAX_FILESIZE,<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          HConstants.DEFAULT_MAX_FILE_SIZE)) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>          LOG.warn("Trying to bulk load hfile " + hfile.getPath() + " with size: " + length +<a name="line.311"></a>
-<span class="sourceLineNo">312</span>            " bytes can be problematic as it may lead to oversplitting.");<a name="line.312"></a>
-<span class="sourceLineNo">313</span>        }<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        ret.add(new LoadQueueItem(family, hfile.getPath()));<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      }<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    }, validateHFile);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>  }<a name="line.317"></a>
-<span class="sourceLineNo">318</span><a name="line.318"></a>
-<span class="sourceLineNo">319</span>  /**<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   * Prepare a collection of {@code LoadQueueItem} from list of source hfiles contained in the<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * passed directory and validates whether the prepared queue has all the valid table column<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * families in it.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * @param map map of family to List of hfiles<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * @param tableName table to which hfiles should be loaded<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * @param queue queue which needs to be loaded into the table<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * @param silence true to ignore unmatched column families<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * @throws IOException If any I/O or network error occurred<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   */<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  public static void prepareHFileQueue(AsyncClusterConnection conn, TableName tableName,<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      Map&lt;byte[], List&lt;Path&gt;&gt; map, Deque&lt;LoadQueueItem&gt; queue, boolean silence) throws IOException {<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    populateLoadQueue(queue, map);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    validateFamiliesInHFiles(FutureUtils.get(conn.getAdmin().getDescriptor(tableName)), queue,<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      silence);<a name="line.333"></a>
-<span class="sourceLineNo">334</span>  }<a name="line.334"></a>
-<span class="sourceLineNo">335</span><a name="line.335"></a>
-<span class="sourceLineNo">336</span>  /**<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * Prepare a collection of {@code LoadQueueItem} from list of source hfiles contained in the<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   * passed directory and validates whether the prepared queue has all the valid table column<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * families in it.<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * @param hfilesDir directory containing list of hfiles to be loaded into the table<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   * @param queue queue which needs to be loaded into the table<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   * @param validateHFile if true hfiles will be validated for its format<a name="line.342"></a>
-<span class="sourceLineNo">343</span>   * @param silence true to ignore unmatched column families<a name="line.343"></a>
-<span class="sourceLineNo">344</span>   * @throws IOException If any I/O or network error occurred<a name="line.344"></a>
-<span class="sourceLineNo">345</span>   */<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  public static void prepareHFileQueue(Configuration conf, AsyncClusterConnection conn,<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      TableName tableName, Path hfilesDir, Deque&lt;LoadQueueItem&gt; queue, boolean validateHFile,<a name="line.347"></a>
-<span class="sourceLineNo">348</span>      boolean silence) throws IOException {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    discoverLoadQueue(conf, queue, hfilesDir, validateHFile);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    validateFamiliesInHFiles(FutureUtils.get(conn.getAdmin().getDescriptor(tableName)), queue,<a name="line.350"></a>
-<span class="sourceLineNo">351</span>      silence);<a name="line.351"></a>
-<span class="sourceLineNo">352</span>  }<a name="line.352"></a>
-<span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>  /**<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * Used by the replication sink to load the hfiles from the source cluster. It does the following,<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   * &lt;ol&gt;<a name="line.356"></a>
-<span class="sourceLineNo">357</span>   * &lt;li&gt;{@link #groupOrSplitPhase(AsyncClusterConnection, TableName, ExecutorService, Deque, List)}<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   * &lt;/li&gt;<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   * &lt;li&gt;{@link #bulkLoadPhase(AsyncClusterConnection, TableName, Deque, Multimap, boolean, Map)}<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * &lt;/li&gt;<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * &lt;/ol&gt;<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * @param conn Connection to use<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * @param tableName Table to which these hfiles should be loaded to<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @param queue {@code LoadQueueItem} has hfiles yet to be loaded<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   */<a name="line.365"></a>
-<span class="sourceLineNo">366</span>  public void loadHFileQueue(AsyncClusterConnection conn, TableName tableName,<a name="line.366"></a>
-<span class="sourceLineNo">367</span>      Deque&lt;LoadQueueItem&gt; queue, boolean copyFiles) throws IOException {<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    ExecutorService pool = createExecutorService();<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    try {<a name="line.369"></a>
-<span class="sourceLineNo">370</span>      Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups = groupOrSplitPhase(conn, tableName, pool,<a name="line.370"></a>
-<span class="sourceLineNo">371</span>        queue, FutureUtils.get(conn.getRegionLocator(tableName).getStartEndKeys())).getFirst();<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      bulkLoadPhase(conn, tableName, queue, regionGroups, copyFiles, null);<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    } finally {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      pool.shutdown();<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    }<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  }<a name="line.376"></a>
-<span class="sourceLineNo">377</span><a name="line.377"></a>
-<span class="sourceLineNo">378</span>  /**<a name="line.378"></a>
-<span class="sourceLineNo">379</span>   * Attempts to do an atomic load of many hfiles into a region. If it fails, it returns a list of<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   * hfiles that need to be retried. If it is successful it will return an empty list. NOTE: To<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   * maintain row atomicity guarantees, region server side should succeed atomically and fails<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   * atomically.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * @param conn Connection to use<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param tableName Table to which these hfiles should be loaded to<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @param copyFiles whether replicate to peer cluster while bulkloading<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   * @param first the start key of region<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @param lqis hfiles should be loaded<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   * @return empty list if success, list of items to retry on recoverable failure<a name="line.388"></a>
-<span class="sourceLineNo">389</span>   */<a name="line.389"></a>
-<span class="sourceLineNo">390</span>  @VisibleForTesting<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  protected CompletableFuture&lt;Collection&lt;LoadQueueItem&gt;&gt; tryAtomicRegionLoad(<a name="line.391"></a>
-<span class="sourceLineNo">392</span>      final AsyncClusterConnection conn, final TableName tableName, boolean copyFiles,<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      final byte[] first, Collection&lt;LoadQueueItem&gt; lqis) {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    List&lt;Pair&lt;byte[], String&gt;&gt; familyPaths =<a name="line.394"></a>
-<span class="sourceLineNo">395</span>        lqis.stream().map(lqi -&gt; Pair.newPair(lqi.getFamily(), lqi.getFilePath().toString()))<a name="line.395"></a>
-<span class="sourceLineNo">396</span>            .collect(Collectors.toList());<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    CompletableFuture&lt;Collection&lt;LoadQueueItem&gt;&gt; future = new CompletableFuture&lt;&gt;();<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    FutureUtils<a name="line.398"></a>
-<span class="sourceLineNo">399</span>        .addListener(<a name="line.399"></a>
-<span class="sourceLineNo">400</span>          conn.bulkLoad(tableName, familyPaths, first, assignSeqIds,<a name="line.400"></a>
-<span class="sourceLineNo">401</span>            fsDelegationToken.getUserToken(), bulkToken, copyFiles, clusterIds, replicate),<a name="line.401"></a>
-<span class="sourceLineNo">402</span>          (loaded, error) -&gt; {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>            if (error != null) {<a name="line.403"></a>
-<span class="sourceLineNo">404</span>              LOG.error("Encountered unrecoverable error from region server", error);<a name="line.404"></a>
-<span class="sourceLineNo">405</span>              if (getConf().getBoolean(RETRY_ON_IO_EXCEPTION, false)<a name="line.405"></a>
-<span class="sourceLineNo">406</span>                  &amp;&amp; numRetries.get() &lt; getConf().getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>                    HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER)) {<a name="line.407"></a>
-<span class="sourceLineNo">408</span>                LOG.warn("Will attempt to retry loading failed HFiles. Retry #"<a name="line.408"></a>
-<span class="sourceLineNo">409</span>                    + numRetries.incrementAndGet());<a name="line.409"></a>
-<span class="sourceLineNo">410</span>                // return lqi's to retry<a name="line.410"></a>
-<span class="sourceLineNo">411</span>                future.complete(lqis);<a name="line.411"></a>
-<span class="sourceLineNo">412</span>              } else {<a name="line.412"></a>
-<span class="sourceLineNo">413</span>                LOG.error(RETRY_ON_IO_EXCEPTION<a name="line.413"></a>
-<span class="sourceLineNo">414</span>                    + " is disabled or we have reached retry limit. Unable to recover");<a name="line.414"></a>
-<span class="sourceLineNo">415</span>                future.completeExceptionally(error);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>              }<a name="line.416"></a>
-<span class="sourceLineNo">417</span>            } else {<a name="line.417"></a>
-<span class="sourceLineNo">418</span>              if (loaded) {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>                future.complete(Collections.emptyList());<a name="line.419"></a>
-<span class="sourceLineNo">420</span>              } else {<a name="line.420"></a>
-<span class="sourceLineNo">421</span>                LOG.warn("Attempt to bulk load region containing " + Bytes.toStringBinary(first)<a name="line.421"></a>
-<span class="sourceLineNo">422</span>                    + " into table " + tableName + " with files " + lqis<a name="line.422"></a>
-<span class="sourceLineNo">423</span>                    + " failed.  This is recoverable and they will be retried.");<a name="line.423"></a>
-<span class="sourceLineNo">424</span>                // return lqi's to retry<a name="line.424"></a>
-<span class="sourceLineNo">425</span>                future.complete(lqis);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>              }<a name="line.426"></a>
-<span class="sourceLineNo">427</span>            }<a name="line.427"></a>
-<span class="sourceLineNo">428</span>          });<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    return future;<a name="line.429"></a>
-<span class="sourceLineNo">430</span>  }<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>  /**<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * This takes the LQI's grouped by likely regions and attempts to bulk load them. Any failures are<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   * re-queued for another pass with the groupOrSplitPhase.<a name="line.434"></a>
-<span class="sourceLineNo">435</span>   * &lt;p/&gt;<a name="line.435"></a>
-<span class="sourceLineNo">436</span>   * protected for testing.<a name="line.436"></a>
-<span class="sourceLineNo">437</span>   */<a name="line.437"></a>
-<span class="sourceLineNo">438</span>  @VisibleForTesting<a name="line.438"></a>
-<span class="sourceLineNo">439</span>  protected void bulkLoadPhase(AsyncClusterConnection conn, TableName tableName,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      Deque&lt;LoadQueueItem&gt; queue, Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups,<a name="line.440"></a>
-<span class="sourceLineNo">441</span>      boolean copyFiles, Map&lt;LoadQueueItem, ByteBuffer&gt; item2RegionMap) throws IOException {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>    // atomically bulk load the groups.<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    List&lt;Future&lt;Collection&lt;LoadQueueItem&gt;&gt;&gt; loadingFutures = new ArrayList&lt;&gt;();<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    for (Entry&lt;ByteBuffer, ? extends Collection&lt;LoadQueueItem&gt;&gt; entry : regionGroups.asMap()<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        .entrySet()) {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>      byte[] first = entry.getKey().array();<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      final Collection&lt;LoadQueueItem&gt; lqis = entry.getValue();<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      if (bulkLoadByFamily) {<a name="line.448"></a>
-<span class="sourceLineNo">449</span>        groupByFamilies(lqis).values().forEach(familyQueue -&gt; loadingFutures<a name="line.449"></a>
-<span class="sourceLineNo">450</span>            .add(tryAtomicRegionLoad(conn, tableName, copyFiles, first, familyQueue)));<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      } else {<a name="line.451"></a>
-<span class="sourceLineNo">452</span>        loadingFutures.add(tryAtomicRegionLoad(conn, tableName, copyFiles, first, lqis));<a name="line.452"></a>
-<span class="sourceLineNo">453</span>      }<a name="line.453"></a>
-<span class="sourceLineNo">454</span>      if (item2RegionMap != null) {<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        for (LoadQueueItem lqi : lqis) {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          item2RegionMap.put(lqi, entry.getKey());<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    // get all the results.<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    for (Future&lt;Collection&lt;LoadQueueItem&gt;&gt; future : loadingFutures) {<a name="line.462"></a>
-<span class="sourceLineNo">463</span>      try {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>        Collection&lt;LoadQueueItem&gt; toRetry = future.get();<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>        if (item2RegionMap != null) {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          for (LoadQueueItem lqi : toRetry) {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>            item2RegionMap.remove(lqi);<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          }<a name="line.469"></a>
-<span class="sourceLineNo">470</span>        }<a name="line.470"></a>
-<span class="sourceLineNo">471</span>        // LQIs that are requeued to be regrouped.<a name="line.471"></a>
-<span class="sourceLineNo">472</span>        queue.addAll(toRetry);<a name="line.472"></a>
-<span class="sourceLineNo">473</span>      } catch (ExecutionException e1) {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>        Throwable t = e1.getCause();<a name="line.474"></a>
-<span class="sourceLineNo">475</span>        if (t instanceof IOException) {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>          // At this point something unrecoverable has happened.<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          // TODO Implement bulk load recovery<a name="line.477"></a>
-<span class="sourceLineNo">478</span>          throw new IOException("BulkLoad encountered an unrecoverable problem", t);<a name="line.478"></a>
-<span class="sourceLineNo">479</span>        }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>        LOG.error("Unexpected execution exception during bulk load", e1);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>        throw new IllegalStateException(t);<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      } catch (InterruptedException e1) {<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        LOG.error("Unexpected interrupted exception during bulk load", e1);<a name="line.483"></a>
-<span class="sourceLineNo">484</span>        throw (InterruptedIOException) new InterruptedIOException().initCause(e1);<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      }<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  private Map&lt;byte[], Collection&lt;LoadQueueItem&gt;&gt;<a name="line.489"></a>
-<span class="sourceLineNo">490</span>      groupByFamilies(Collection&lt;LoadQueueItem&gt; itemsInRegion) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    Map&lt;byte[], Collection&lt;LoadQueueItem&gt;&gt; families2Queue = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    itemsInRegion.forEach(item -&gt; families2Queue<a name="line.492"></a>
-<span class="sourceLineNo">493</span>        .computeIfAbsent(item.getFamily(), queue -&gt; new ArrayList&lt;&gt;()).add(item));<a name="line.493"></a>
-<span class="sourceLineNo">494</span>    return families2Queue;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  }<a name="line.495"></a>
-<span class="sourceLineNo">496</span><a name="line.496"></a>
-<span class="sourceLineNo">497</span>  private boolean checkHFilesCountPerRegionPerFamily(<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      final Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups) {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    for (Map.Entry&lt;ByteBuffer, Collection&lt;LoadQueueItem&gt;&gt; e : regionGroups.asMap().entrySet()) {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      Map&lt;byte[], MutableInt&gt; filesMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      for (LoadQueueItem lqi : e.getValue()) {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        MutableInt count = filesMap.computeIfAbsent(lqi.getFamily(), k -&gt; new MutableInt());<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        count.increment();<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        if (count.intValue() &gt; maxFilesPerRegionPerFamily) {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          LOG.error("Trying to load more than " + maxFilesPerRegionPerFamily +<a name="line.505"></a>
-<span class="sourceLineNo">506</span>            " hfiles to family " + Bytes.toStringBinary(lqi.getFamily()) +<a name="line.506"></a>
-<span class="sourceLineNo">507</span>            " of region with start key " + Bytes.toStringBinary(e.getKey()));<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          return false;<a name="line.508"></a>
-<span class="sourceLineNo">509</span>        }<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      }<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    }<a name="line.511"></a>
-<span class="sourceLineNo">512</span>    return true;<a name="line.512"></a>
-<span class="sourceLineNo">513</span>  }<a name="line.513"></a>
-<span class="sourceLineNo">514</span><a name="line.514"></a>
-<span class="sourceLineNo">515</span>  /**<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * @param conn the HBase cluster connection<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   * @param tableName the table name of the table to load into<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @param pool the ExecutorService<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   * @param queue the queue for LoadQueueItem<a name="line.519"></a>
-<span class="sourceLineNo">520</span>   * @param startEndKeys start and end keys<a name="line.520"></a>
-<span class="sourceLineNo">521</span>   * @return A map that groups LQI by likely bulk load region targets and Set of missing hfiles.<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   */<a name="line.522"></a>
-<span class="sourceLineNo">523</span>  private Pair&lt;Multimap&lt;ByteBuffer, LoadQueueItem&gt;, Set&lt;String&gt;&gt; groupOrSplitPhase(<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      AsyncClusterConnection conn, TableName tableName, ExecutorService pool,<a name="line.524"></a>
-<span class="sourceLineNo">525</span>      Deque&lt;LoadQueueItem&gt; queue, List&lt;Pair&lt;byte[], byte[]&gt;&gt; startEndKeys) throws IOException {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>    // &lt;region start key, LQI&gt; need synchronized only within this scope of this<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    // phase because of the puts that happen in futures.<a name="line.527"></a>
-<span class="sourceLineNo">528</span>    Multimap&lt;ByteBuffer, LoadQueueItem&gt; rgs = HashMultimap.create();<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    final Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups = Multimaps.synchronizedMultimap(rgs);<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    Set&lt;String&gt; missingHFiles = new HashSet&lt;&gt;();<a name="line.530"></a>
-<span class="sourceLineNo">531</span>    Pair&lt;Multimap&lt;ByteBuffer, LoadQueueItem&gt;, Set&lt;String&gt;&gt; pair =<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      new Pair&lt;&gt;(regionGroups, missingHFiles);<a name="line.532"></a>
-<span class="sourceLineNo">533</span><a name="line.533"></a>
-<span class="sourceLineNo">534</span>    // drain LQIs and figure out bulk load groups<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    Set&lt;Future&lt;Pair&lt;List&lt;LoadQueueItem&gt;, String&gt;&gt;&gt; splittingFutures = new HashSet&lt;&gt;();<a name="line.535"></a>
-<span class="sourceLineNo">536</span>    while (!queue.isEmpty()) {<a name="line.536"></a>
-<span class="sourceLineNo">537</span>      final LoadQueueItem item = queue.remove();<a name="line.537"></a>
-<span class="sourceLineNo">538</span><a name="line.538"></a>
-<span class="sourceLineNo">539</span>      final Callable&lt;Pair&lt;List&lt;LoadQueueItem&gt;, String&gt;&gt; call =<a name="line.539"></a>
-<span class="sourceLineNo">540</span>        () -&gt; groupOrSplit(conn, tableName, regionGroups, item, startEndKeys);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>      splittingFutures.add(pool.submit(call));<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    // get all the results. All grouping and splitting must finish before<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    // we can attempt the atomic loads.<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    for (Future&lt;Pair&lt;List&lt;LoadQueueItem&gt;, String&gt;&gt; lqis : splittingFutures) {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      try {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>        Pair&lt;List&lt;LoadQueueItem&gt;, String&gt; splits = lqis.get();<a name="line.547"></a>
-<span class="sourceLineNo">548</span>        if (splits != null) {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>          if (splits.getFirst() != null) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>            queue.addAll(splits.getFirst());<a name="line.550"></a>
-<span class="sourceLineNo">551</span>          } else {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>            missingHFiles.add(splits.getSecond());<a name="line.552"></a>
-<span class="sourceLineNo">553</span>          }<a name="line.553"></a>
+<span class="sourceLineNo">187</span>    return !HFileInfo.isReservedFileInfoKey(key);<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  }<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span>  /**<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * Checks whether there is any invalid family name in HFiles to be bulk loaded.<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   */<a name="line.192"></a>
+<span class="sourceLineNo">193</span>  private static void validateFamiliesInHFiles(TableDescriptor tableDesc,<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      Deque&lt;LoadQueueItem&gt; queue, boolean silence) throws IOException {<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    Set&lt;String&gt; familyNames = Arrays.stream(tableDesc.getColumnFamilies())<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      .map(ColumnFamilyDescriptor::getNameAsString).collect(Collectors.toSet());<a name="line.196"></a>
+<span class="sourceLineNo">197</span>    List&lt;String&gt; unmatchedFamilies = queue.stream().map(item -&gt; Bytes.toString(item.getFamily()))<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      .filter(fn -&gt; !familyNames.contains(fn)).distinct().collect(Collectors.toList());<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    if (unmatchedFamilies.size() &gt; 0) {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      String msg =<a name="line.200"></a>
+<span class="sourceLineNo">201</span>        "Unmatched family names found: unmatched family names in HFiles to be bulkloaded: " +<a name="line.201"></a>
+<span class="sourceLineNo">202</span>          unmatchedFamilies + "; valid family names of table " + tableDesc.getTableName() +<a name="line.202"></a>
+<span class="sourceLineNo">203</span>          " are: " + familyNames;<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      LOG.error(msg);<a name="line.204"></a>
+<span class="sourceLineNo">205</span>      if (!silence) {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>        throw new IOException(msg);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      }<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    }<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  }<a name="line.209"></a>
+<span class="sourceLineNo">210</span><a name="line.210"></a>
+<span class="sourceLineNo">211</span>  /**<a name="line.211"></a>
+<span class="sourceLineNo">212</span>   * Populate the Queue with given HFiles<a name="line.212"></a>
+<span class="sourceLineNo">213</span>   */<a name="line.213"></a>
+<span class="sourceLineNo">214</span>  private static void populateLoadQueue(Deque&lt;LoadQueueItem&gt; ret, Map&lt;byte[], List&lt;Path&gt;&gt; map) {<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    map.forEach((k, v) -&gt; v.stream().map(p -&gt; new LoadQueueItem(k, p)).forEachOrdered(ret::add));<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  }<a name="line.216"></a>
+<span class="sourceLineNo">217</span><a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private interface BulkHFileVisitor&lt;TFamily&gt; {<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>    TFamily bulkFamily(byte[] familyName) throws IOException;<a name="line.220"></a>
+<span class="sourceLineNo">221</span><a name="line.221"></a>
+<span class="sourceLineNo">222</span>    void bulkHFile(TFamily family, FileStatus hfileStatus) throws IOException;<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  }<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>  /**<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   * Iterate over the bulkDir hfiles. Skip reference, HFileLink, files starting with "_". Check and<a name="line.226"></a>
+<span class="sourceLineNo">227</span>   * skip non-valid hfiles by default, or skip this validation by setting {@link #VALIDATE_HFILES}<a name="line.227"></a>
+<span class="sourceLineNo">228</span>   * to false.<a name="line.228"></a>
+<span class="sourceLineNo">229</span>   */<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static &lt;TFamily&gt; void visitBulkHFiles(FileSystem fs, Path bulkDir,<a name="line.230"></a>
+<span class="sourceLineNo">231</span>      BulkHFileVisitor&lt;TFamily&gt; visitor, boolean validateHFile) throws IOException {<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    FileStatus[] familyDirStatuses = fs.listStatus(bulkDir);<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    for (FileStatus familyStat : familyDirStatuses) {<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      if (!familyStat.isDirectory()) {<a name="line.234"></a>
+<span class="sourceLineNo">235</span>        LOG.warn("Skipping non-directory " + familyStat.getPath());<a name="line.235"></a>
+<span class="sourceLineNo">236</span>        continue;<a name="line.236"></a>
+<span class="sourceLineNo">237</span>      }<a name="line.237"></a>
+<span class="sourceLineNo">238</span>      Path familyDir = familyStat.getPath();<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      byte[] familyName = Bytes.toBytes(familyDir.getName());<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      // Skip invalid family<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      try {<a name="line.241"></a>
+<span class="sourceLineNo">242</span>        ColumnFamilyDescriptorBuilder.isLegalColumnFamilyName(familyName);<a name="line.242"></a>
+<span class="sourceLineNo">243</span>      } catch (IllegalArgumentException e) {<a name="line.243"></a>
+<span class="sourceLineNo">244</span>        LOG.warn("Skipping invalid " + familyStat.getPath());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>        continue;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>      }<a name="line.246"></a>
+<span class="sourceLineNo">247</span>      TFamily family = visitor.bulkFamily(familyName);<a name="line.247"></a>
+<span class="sourceLineNo">248</span><a name="line.248"></a>
+<span class="sourceLineNo">249</span>      FileStatus[] hfileStatuses = fs.listStatus(familyDir);<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      for (FileStatus hfileStatus : hfileStatuses) {<a name="line.250"></a>
+<span class="sourceLineNo">251</span>        if (!fs.isFile(hfileStatus.getPath())) {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>          LOG.warn("Skipping non-file " + hfileStatus);<a name="line.252"></a>
+<span class="sourceLineNo">253</span>          continue;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        }<a name="line.254"></a>
+<span class="sourceLineNo">255</span><a name="line.255"></a>
+<span class="sourceLineNo">256</span>        Path hfile = hfileStatus.getPath();<a name="line.256"></a>
+<span class="sourceLineNo">257</span>        // Skip "_", reference, HFileLink<a name="line.257"></a>
+<span class="sourceLineNo">258</span>        String fileName = hfile.getName();<a name="line.258"></a>
+<span class="sourceLineNo">259</span>        if (fileName.startsWith("_")) {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>          continue;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        }<a name="line.261"></a>
+<span class="sourceLineNo">262</span>        if (StoreFileInfo.isReference(fileName)) {<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          LOG.warn("Skipping reference " + fileName);<a name="line.263"></a>
+<span class="sourceLineNo">264</span>          continue;<a name="line.264"></a>
+<span class="sourceLineNo">265</span>        }<a name="line.265"></a>
+<span class="sourceLineNo">266</span>        if (HFileLink.isHFileLink(fileName)) {<a name="line.266"></a>
+<span class="sourceLineNo">267</span>          LOG.warn("Skipping HFileLink " + fileName);<a name="line.267"></a>
+<span class="sourceLineNo">268</span>          continue;<a name="line.268"></a>
+<span class="sourceLineNo">269</span>        }<a name="line.269"></a>
+<span class="sourceLineNo">270</span><a name="line.270"></a>
+<span class="sourceLineNo">271</span>        // Validate HFile Format if needed<a name="line.271"></a>
+<span class="sourceLineNo">272</span>        if (validateHFile) {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>          try {<a name="line.273"></a>
+<span class="sourceLineNo">274</span>            if (!HFile.isHFileFormat(fs, hfile)) {<a name="line.274"></a>
+<span class="sourceLineNo">275</span>              LOG.warn("the file " + hfile + " doesn't seems to be an hfile. skipping");<a name="line.275"></a>
+<span class="sourceLineNo">276</span>              continue;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>            }<a name="line.277"></a>
+<span class="sourceLineNo">278</span>          } catch (FileNotFoundException e) {<a name="line.278"></a>
+<span class="sourceLineNo">279</span>            LOG.warn("the file " + hfile + " was removed");<a name="line.279"></a>
+<span class="sourceLineNo">280</span>            continue;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>          }<a name="line.281"></a>
+<span class="sourceLineNo">282</span>        }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>        visitor.bulkHFile(family, hfileStatus);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      }<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  }<a name="line.287"></a>
+<span class="sourceLineNo">288</span><a name="line.288"></a>
+<span class="sourceLineNo">289</span>  /**<a name="line.289"></a>
+<span class="sourceLineNo">290</span>   * Walk the given directory for all HFiles, and return a Queue containing all such files.<a name="line.290"></a>
+<span class="sourceLineNo">291</span>   */<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private static void discoverLoadQueue(Configuration conf, Deque&lt;LoadQueueItem&gt; ret, Path hfofDir,<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      boolean validateHFile) throws IOException {<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    visitBulkHFiles(hfofDir.getFileSystem(conf), hfofDir, new BulkHFileVisitor&lt;byte[]&gt;() {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      @Override<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      public byte[] bulkFamily(final byte[] familyName) {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>        return familyName;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>      }<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>      @Override<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      public void bulkHFile(final byte[] family, final FileStatus hfile) {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>        long length = hfile.getLen();<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        if (length &gt; conf.getLong(HConstants.HREGION_MAX_FILESIZE,<a name="line.303"></a>
+<span class="sourceLineNo">304</span>          HConstants.DEFAULT_MAX_FILE_SIZE)) {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>          LOG.warn("Trying to bulk load hfile " + hfile.getPath() + " with size: " + length +<a name="line.305"></a>
+<span class="sourceLineNo">306</span>            " bytes can be problematic as it may lead to oversplitting.");<a name="line.306"></a>
+<span class="sourceLineNo">307</span>        }<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        ret.add(new LoadQueueItem(family, hfile.getPath()));<a name="line.308"></a>
+<span class="sourceLineNo">309</span>      }<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    }, validateHFile);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Prepare a collection of {@code LoadQueueItem} from list of source hfiles contained in the<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * passed directory and validates whether the prepared queue has all the valid table column<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   * families in it.<a name="line.316"></a>
+<span class="sourceLineNo">317</span>   * @param map map of family to List of hfiles<a name="line.317"></a>
+<span class="sourceLineNo">318</span>   * @param tableName table to which hfiles should be loaded<a name="line.318"></a>
+<span class="sourceLineNo">319</span>   * @param queue queue which needs to be loaded into the table<a name="line.319"></a>
+<span class="sourceLineNo">320</span>   * @param silence true to ignore unmatched column families<a name="line.320"></a>
+<span class="sourceLineNo">321</span>   * @throws IOException If any I/O or network error occurred<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   */<a name="line.322"></a>
+<span class="sourceLineNo">323</span>  public static void prepareHFileQueue(AsyncClusterConnection conn, TableName tableName,<a name="line.323"></a>
+<span class="sourceLineNo">324</span>      Map&lt;byte[], List&lt;Path&gt;&gt; map, Deque&lt;LoadQueueItem&gt; queue, boolean silence) throws IOException {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    populateLoadQueue(queue, map);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>    validateFamiliesInHFiles(FutureUtils.get(conn.getAdmin().getDescriptor(tableName)), queue,<a name="line.326"></a>
+<span class="sourceLineNo">327</span>      silence);<a name="line.327"></a>
+<span class="sourceLineNo">328</span>  }<a name="line.328"></a>
+<span class="sourceLineNo">329</span><a name="line.329"></a>
+<span class="sourceLineNo">330</span>  /**<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * Prepare a collection of {@code LoadQueueItem} from list of source hfiles contained in the<a name="line.331"></a>
+<span class="sourceLineNo">332</span>   * passed directory and validates whether the prepared queue has all the valid table column<a name="line.332"></a>
+<span class="sourceLineNo">333</span>   * families in it.<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * @param hfilesDir directory containing list of hfiles to be loaded into the table<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   * @param queue queue which needs to be loaded into the table<a name="line.335"></a>
+<span class="sourceLineNo">336</span>   * @param validateHFile if true hfiles will be validated for its format<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * @param silence true to ignore unmatched column families<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * @throws IOException If any I/O or network error occurred<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   */<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  public static void prepareHFileQueue(Configuration conf, AsyncClusterConnection conn,<a name="line.340"></a>
+<span class="sourceLineNo">341</span>      TableName tableName, Path hfilesDir, Deque&lt;LoadQueueItem&gt; queue, boolean validateHFile,<a name="line.341"></a>
+<span class="sourceLineNo">342</span>      boolean silence) throws IOException {<a name="line.342"></a>
+<span class="sourceLineNo">343</span>    discoverLoadQueue(conf, queue, hfilesDir, validateHFile);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    validateFamiliesInHFiles(FutureUtils.get(conn.getAdmin().getDescriptor(tableName)), queue,<a name="line.344"></a>
+<span class="sourceLineNo">345</span>      silence);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  }<a name="line.346"></a>
+<span class="sourceLineNo">347</span><a name="line.347"></a>
+<span class="sourceLineNo">348</span>  /**<a name="line.348"></a>
+<span class="sourceLineNo">349</span>   * Used by the replication sink to load the hfiles from the source cluster. It does the following,<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * &lt;ol&gt;<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   * &lt;li&gt;{@link #groupOrSplitPhase(AsyncClusterConnection, TableName, ExecutorService, Deque, List)}<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * &lt;/li&gt;<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * &lt;li&gt;{@link #bulkLoadPhase(AsyncClusterConnection, TableName, Deque, Multimap, boolean, Map)}<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * &lt;/li&gt;<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   * &lt;/ol&gt;<a name="line.355"></a>
+<span class="sourceLineNo">356</span>   * @param conn Connection to use<a name="line.356"></a>
+<span class="sourceLineNo">357</span>   * @param tableName Table to which these hfiles should be loaded to<a name="line.357"></a>
+<span class="sourceLineNo">358</span>   * @param queue {@code LoadQueueItem} has hfiles yet to be loaded<a name="line.358"></a>
+<span class="sourceLineNo">359</span>   */<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  public void loadHFileQueue(AsyncClusterConnection conn, TableName tableName,<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      Deque&lt;LoadQueueItem&gt; queue, boolean copyFiles) throws IOException {<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    ExecutorService pool = createExecutorService();<a name="line.362"></a>
+<span class="sourceLineNo">363</span>    try {<a name="line.363"></a>
+<span class="sourceLineNo">364</span>      Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups = groupOrSplitPhase(conn, tableName, pool,<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        queue, FutureUtils.get(conn.getRegionLocator(tableName).getStartEndKeys())).getFirst();<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      bulkLoadPhase(conn, tableName, queue, regionGroups, copyFiles, null);<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    } finally {<a name="line.367"></a>
+<span class="sourceLineNo">368</span>      pool.shutdown();<a name="line.368"></a>
+<span class="sourceLineNo">369</span>    }<a name="line.369"></a>
+<span class="sourceLineNo">370</span>  }<a name="line.370"></a>
+<span class="sourceLineNo">371</span><a name="line.371"></a>
+<span class="sourceLineNo">372</span>  /**<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   * Attempts to do an atomic load of many hfiles into a region. If it fails, it returns a list of<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   * hfiles that need to be retried. If it is successful it will return an empty list. NOTE: To<a name="line.374"></a>
+<span class="sourceLineNo">375</span>   * maintain row atomicity guarantees, region server side should succeed atomically and fails<a name="line.375"></a>
+<span class="sourceLineNo">376</span>   * atomically.<a name="line.376"></a>
+<span class="sourceLineNo">377</span>   * @param conn Connection to use<a name="line.377"></a>
+<span class="sourceLineNo">378</span>   * @param tableName Table to which these hfiles should be loaded to<a name="line.378"></a>
+<span class="sourceLineNo">379</span>   * @param copyFiles whether replicate to peer cluster while bulkloading<a name="line.379"></a>
+<span class="sourceLineNo">380</span>   * @param first the start key of region<a name="line.380"></a>
+<span class="sourceLineNo">381</span>   * @param lqis hfiles should be loaded<a name="line.381"></a>
+<span class="sourceLineNo">382</span>   * @return empty list if success, list of items to retry on recoverable failure<a name="line.382"></a>
+<span class="sourceLineNo">383</span>   */<a name="line.383"></a>
+<span class="sourceLineNo">384</span>  @VisibleForTesting<a name="line.384"></a>
+<span class="sourceLineNo">385</span>  protected CompletableFuture&lt;Collection&lt;LoadQueueItem&gt;&gt; tryAtomicRegionLoad(<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      final AsyncClusterConnection conn, final TableName tableName, boolean copyFiles,<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      final byte[] first, Collection&lt;LoadQueueItem&gt; lqis) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>    List&lt;Pair&lt;byte[], String&gt;&gt; familyPaths =<a name="line.388"></a>
+<span class="sourceLineNo">389</span>        lqis.stream().map(lqi -&gt; Pair.newPair(lqi.getFamily(), lqi.getFilePath().toString()))<a name="line.389"></a>
+<span class="sourceLineNo">390</span>            .collect(Collectors.toList());<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    CompletableFuture&lt;Collection&lt;LoadQueueItem&gt;&gt; future = new CompletableFuture&lt;&gt;();<a name="line.391"></a>
+<span class="sourceLineNo">392</span>    FutureUtils<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        .addListener(<a name="line.393"></a>
+<span class="sourceLineNo">394</span>          conn.bulkLoad(tableName, familyPaths, first, assignSeqIds,<a name="line.394"></a>
+<span class="sourceLineNo">395</span>            fsDelegationToken.getUserToken(), bulkToken, copyFiles, clusterIds, replicate),<a name="line.395"></a>
+<span class="sourceLineNo">396</span>          (loaded, error) -&gt; {<a name="line.396"></a>
+<span class="sourceLineNo">397</span>            if (error != null) {<a name="line.397"></a>
+<span class="sourceLineNo">398</span>              LOG.error("Encountered unrecoverable error from region server", error);<a name="line.398"></a>
+<span class="sourceLineNo">399</span>              if (getConf().getBoolean(RETRY_ON_IO_EXCEPTION, false)<a name="line.399"></a>
+<span class="sourceLineNo">400</span>                  &amp;&amp; numRetries.get() &lt; getConf().getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.400"></a>
+<span class="sourceLineNo">401</span>                    HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER)) {<a name="line.401"></a>
+<span class="sourceLineNo">402</span>                LOG.warn("Will attempt to retry loading failed HFiles. Retry #"<a name="line.402"></a>
+<span class="sourceLineNo">403</span>                    + numRetries.incrementAndGet());<a name="line.403"></a>
+<span class="sourceLineNo">404</span>                // return lqi's to retry<a name="line.404"></a>
+<span class="sourceLineNo">405</span>                future.complete(lqis);<a name="line.405"></a>
+<span class="sourceLineNo">406</span>              } else {<a name="line.406"></a>
+<span class="sourceLineNo">407</span>                LOG.error(RETRY_ON_IO_EXCEPTION<a name="line.407"></a>
+<span class="sourceLineNo">408</span>                    + " is disabled or we have reached retry limit. Unable to recover");<a name="line.408"></a>
+<span class="sourceLineNo">409</span>                future.completeExceptionally(error);<a name="line.409"></a>
+<span class="sourceLineNo">410</span>              }<a name="line.410"></a>
+<span class="sourceLineNo">411</span>            } else {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>              if (loaded) {<a name="line.412"></a>
+<span class="sourceLineNo">413</span>                future.complete(Collections.emptyList());<a name="line.413"></a>
+<span class="sourceLineNo">414</span>              } else {<a name="line.414"></a>
+<span class="sourceLineNo">415</span>                LOG.warn("Attempt to bulk load region containing " + Bytes.toStringBinary(first)<a name="line.415"></a>
+<span class="sourceLineNo">416</span>                    + " into table " + tableName + " with files " + lqis<a name="line.416"></a>
+<span class="sourceLineNo">417</span>                    + " failed.  This is recoverable and they will be retried.");<a name="line.417"></a>
+<span class="sourceLineNo">418</span>                // return lqi's to retry<a name="line.418"></a>
+<span class="sourceLineNo">419</span>                future.complete(lqis);<a name="line.419"></a>
+<span class="sourceLineNo">420</span>              }<a name="line.420"></a>
+<span class="sourceLineNo">421</span>            }<a name="line.421"></a>
+<span class="sourceLineNo">422</span>          });<a name="line.422"></a>
+<span class="sourceLineNo">423</span>    return future;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>  }<a name="line.424"></a>
+<span class="sourceLineNo">425</span><a name="line.425"></a>
+<span class="sourceLineNo">426</span>  /**<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   * This takes the LQI's grouped by likely regions and attempts to bulk load them. Any failures are<a name="line.427"></a>
+<span class="sourceLineNo">428</span>   * re-queued for another pass with the groupOrSplitPhase.<a name="line.428"></a>
+<span class="sourceLineNo">429</span>   * &lt;p/&gt;<a name="line.429"></a>
+<span class="sourceLineNo">430</span>   * protected for testing.<a name="line.430"></a>
+<span class="sourceLineNo">431</span>   */<a name="line.431"></a>
+<span class="sourceLineNo">432</span>  @VisibleForTesting<a name="line.432"></a>
+<span class="sourceLineNo">433</span>  protected void bulkLoadPhase(AsyncClusterConnection conn, TableName tableName,<a name="line.433"></a>
+<span class="sourceLineNo">434</span>      Deque&lt;LoadQueueItem&gt; queue, Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups,<a name="line.434"></a>
+<span class="sourceLineNo">435</span>      boolean copyFiles, Map&lt;LoadQueueItem, ByteBuffer&gt; item2RegionMap) throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>    // atomically bulk load the groups.<a name="line.436"></a>
+<span class="sourceLineNo">437</span>    List&lt;Future&lt;Collection&lt;LoadQueueItem&gt;&gt;&gt; loadingFutures = new ArrayList&lt;&gt;();<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    for (Entry&lt;ByteBuffer, ? extends Collection&lt;LoadQueueItem&gt;&gt; entry : regionGroups.asMap()<a name="line.438"></a>
+<span class="sourceLineNo">439</span>        .entrySet()) {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>      byte[] first = entry.getKey().array();<a name="line.440"></a>
+<span class="sourceLineNo">441</span>      final Collection&lt;LoadQueueItem&gt; lqis = entry.getValue();<a name="line.441"></a>
+<span class="sourceLineNo">442</span>      if (bulkLoadByFamily) {<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        groupByFamilies(lqis).values().forEach(familyQueue -&gt; loadingFutures<a name="line.443"></a>
+<span class="sourceLineNo">444</span>            .add(tryAtomicRegionLoad(conn, tableName, copyFiles, first, familyQueue)));<a name="line.444"></a>
+<span class="sourceLineNo">445</span>      } else {<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        loadingFutures.add(tryAtomicRegionLoad(conn, tableName, copyFiles, first, lqis));<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      }<a name="line.447"></a>
+<span class="sourceLineNo">448</span>      if (item2RegionMap != null) {<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        for (LoadQueueItem lqi : lqis) {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>          item2RegionMap.put(lqi, entry.getKey());<a name="line.450"></a>
+<span class="sourceLineNo">451</span>        }<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      }<a name="line.452"></a>
+<span class="sourceLineNo">453</span>    }<a name="line.453"></a>
+<span class="sourceLineNo">454</span><a name="line.454"></a>
+<span class="sourceLineNo">455</span>    // get all the results.<a name="line.455"></a>
+<span class="sourceLineNo">456</span>    for (Future&lt;Collection&lt;LoadQueueItem&gt;&gt; future : loadingFutures) {<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      try {<a name="line.457"></a>
+<span class="sourceLineNo">458</span>        Collection&lt;LoadQueueItem&gt; toRetry = future.get();<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>        if (item2RegionMap != null) {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>          for (LoadQueueItem lqi : toRetry) {<a name="line.461"></a>
+<span class="sourceLineNo">462</span>            item2RegionMap.remove(lqi);<a name="line.462"></a>
+<span class="sourceLineNo">463</span>          }<a name="line.463"></a>
+<span class="sourceLineNo">464</span>        }<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        // LQIs that are requeued to be regrouped.<a name="line.465"></a>
+<span class="sourceLineNo">466</span>        queue.addAll(toRetry);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>      } catch (ExecutionException e1) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>        Throwable t = e1.getCause();<a name="line.468"></a>
+<span class="sourceLineNo">469</span>        if (t instanceof IOException) {<a name="line.469"></a>
+<span class="sourceLineNo">470</span>          // At this point something unrecoverable has happened.<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          // TODO Implement bulk load recovery<a name="line.471"></a>
+<span class="sourceLineNo">472</span>          throw new IOException("BulkLoad encountered an unrecoverable problem", t);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>        }<a name="line.473"></a>
+<span class="sourceLineNo">474</span>        LOG.error("Unexpected execution exception during bulk load", e1);<a name="line.474"></a>
+<span class="sourceLineNo">475</span>        throw new IllegalStateException(t);<a name="line.475"></a>
+<span class="sourceLineNo">476</span>      } catch (InterruptedException e1) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>        LOG.error("Unexpected interrupted exception during bulk load", e1);<a name="line.477"></a>
+<span class="sourceLineNo">478</span>        throw (InterruptedIOException) new InterruptedIOException().initCause(e1);<a name="line.478"></a>
+<span class="sourceLineNo">479</span>      }<a name="line.479"></a>
+<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>  }<a name="line.481"></a>
+<span class="sourceLineNo">482</span><a name="line.482"></a>
+<span class="sourceLineNo">483</span>  private Map&lt;byte[], Collection&lt;LoadQueueItem&gt;&gt;<a name="line.483"></a>
+<span class="sourceLineNo">484</span>      groupByFamilies(Collection&lt;LoadQueueItem&gt; itemsInRegion) {<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    Map&lt;byte[], Collection&lt;LoadQueueItem&gt;&gt; families2Queue = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.485"></a>
+<span class="sourceLineNo">486</span>    itemsInRegion.forEach(item -&gt; families2Queue<a name="line.486"></a>
+<span class="sourceLineNo">487</span>        .computeIfAbsent(item.getFamily(), queue -&gt; new ArrayList&lt;&gt;()).add(item));<a name="line.487"></a>
+<span class="sourceLineNo">488</span>    return families2Queue;<a name="line.488"></a>
+<span class="sourceLineNo">489</span>  }<a name="line.489"></a>
+<span class="sourceLineNo">490</span><a name="line.490"></a>
+<span class="sourceLineNo">491</span>  private boolean checkHFilesCountPerRegionPerFamily(<a name="line.491"></a>
+<span class="sourceLineNo">492</span>      final Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups) {<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    for (Map.Entry&lt;ByteBuffer, Collection&lt;LoadQueueItem&gt;&gt; e : regionGroups.asMap().entrySet()) {<a name="line.493"></a>
+<span class="sourceLineNo">494</span>      Map&lt;byte[], MutableInt&gt; filesMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      for (LoadQueueItem lqi : e.getValue()) {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>        MutableInt count = filesMap.computeIfAbsent(lqi.getFamily(), k -&gt; new MutableInt());<a name="line.496"></a>
+<span class="sourceLineNo">497</span>        count.increment();<a name="line.497"></a>
+<span class="sourceLineNo">498</span>        if (count.intValue() &gt; maxFilesPerRegionPerFamily) {<a name="line.498"></a>
+<span class="sourceLineNo">499</span>          LOG.error("Trying to load more than " + maxFilesPerRegionPerFamily +<a name="line.499"></a>
+<span class="sourceLineNo">500</span>            " hfiles to family " + Bytes.toStringBinary(lqi.getFamily()) +<a name="line.500"></a>
+<span class="sourceLineNo">501</span>            " of region with start key " + Bytes.toStringBinary(e.getKey()));<a name="line.501"></a>
+<span class="sourceLineNo">502</span>          return false;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>        }<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      }<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    }<a name="line.505"></a>
+<span class="sourceLineNo">506</span>    return true;<a name="line.506"></a>
+<span class="sourceLineNo">507</span>  }<a name="line.507"></a>
+<span class="sourceLineNo">508</span><a name="line.508"></a>
+<span class="sourceLineNo">509</span>  /**<a name="line.509"></a>
+<span class="sourceLineNo">510</span>   * @param conn the HBase cluster connection<a name="line.510"></a>
+<span class="sourceLineNo">511</span>   * @param tableName the table name of the table to load into<a name="line.511"></a>
+<span class="sourceLineNo">512</span>   * @param pool the ExecutorService<a name="line.512"></a>
+<span class="sourceLineNo">513</span>   * @param queue the queue for LoadQueueItem<a name="line.513"></a>
+<span class="sourceLineNo">514</span>   * @param startEndKeys start and end keys<a name="line.514"></a>
+<span class="sourceLineNo">515</span>   * @return A map that groups LQI by likely bulk load region targets and Set of missing hfiles.<a name="line.515"></a>
+<span class="sourceLineNo">516</span>   */<a name="line.516"></a>
+<span class="sourceLineNo">517</span>  private Pair&lt;Multimap&lt;ByteBuffer, LoadQueueItem&gt;, Set&lt;String&gt;&gt; groupOrSplitPhase(<a name="line.517"></a>
+<span class="sourceLineNo">518</span>      AsyncClusterConnection conn, TableName tableName, ExecutorService pool,<a name="line.518"></a>
+<span class="sourceLineNo">519</span>      Deque&lt;LoadQueueItem&gt; queue, List&lt;Pair&lt;byte[], byte[]&gt;&gt; startEndKeys) throws IOException {<a name="line.519"></a>
+<span class="sourceLineNo">520</span>    // &lt;region start key, LQI&gt; need synchronized only within this scope of this<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    // phase because of the puts that happen in futures.<a name="line.521"></a>
+<span class="sourceLineNo">522</span>    Multimap&lt;ByteBuffer, LoadQueueItem&gt; rgs = HashMultimap.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>    final Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups = Multimaps.synchronizedMultimap(rgs);<a name="line.523"></a>
+<span class="sourceLineNo">524</span>    Set&lt;String&gt; missingHFiles = new HashSet&lt;&gt;();<a name="line.524"></a>
+<span class="sourceLineNo">525</span>    Pair&lt;Multimap&lt;ByteBuffer, LoadQueueItem&gt;, Set&lt;String&gt;&gt; pair =<a name="line.525"></a>
+<span class="sourceLineNo">526</span>      new Pair&lt;&gt;(regionGroups, missingHFiles);<a name="line.526"></a>
+<span class="sourceLineNo">527</span><a name="line.527"></a>
+<span class="sourceLineNo">528</span>    // drain LQIs and figure out bulk load groups<a name="line.528"></a>
+<span class="sourceLineNo">529</span>    Set&lt;Future&lt;Pair&lt;List&lt;LoadQueueItem&gt;, String&gt;&gt;&gt; splittingFutures = new HashSet&lt;&gt;();<a name="line.529"></a>
+<span class="sourceLineNo">530</span>    while (!queue.isEmpty()) {<a name="line.530"></a>
+<span class="sourceLineNo">531</span>      final LoadQueueItem item = queue.remove();<a name="line.531"></a>
+<span class="sourceLineNo">532</span><a name="line.532"></a>
+<span class="sourceLineNo">533</span>      final Callable&lt;Pair&lt;List&lt;LoadQueueItem&gt;, String&gt;&gt; call =<a name="line.533"></a>
+<span class="sourceLineNo">534</span>        () -&gt; groupOrSplit(conn, tableName, regionGroups, item, startEndKeys);<a name="line.534"></a>
+<span class="sourceLineNo">535</span>      splittingFutures.add(pool.submit(call));<a name="line.535"></a>
+<span class="sourceLineNo">536</span>    }<a name="line.536"></a>
+<span class="sourceLineNo">537</span>    // get all the results. All grouping and splitting must finish before<a name="line.537"></a>
+<span class="sourceLineNo">538</span>    // we can attempt the atomic loads.<a name="line.538"></a>
+<span class="sourceLineNo">539</span>    for (Future&lt;Pair&lt;List&lt;LoadQueueItem&gt;, String&gt;&gt; lqis : splittingFutures) {<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      try {<a name="line.540"></a>
+<span class="sourceLineNo">541</span>        Pair&lt;List&lt;LoadQueueItem&gt;, String&gt; splits = lqis.get();<a name="line.541"></a>
+<span class="sourceLineNo">542</span>        if (splits != null) {<a name="line.542"></a>
+<span class="sourceLineNo">543</span>          if (splits.getFirst() != null) {<a name="line.543"></a>
+<span class="sourceLineNo">544</span>            queue.addAll(splits.getFirst());<a name="line.544"></a>
+<span class="sourceLineNo">545</span>          } else {<a name="line.545"></a>
+<span class="sourceLineNo">546</span>            missingHFiles.add(splits.getSecond());<a name="line.546"></a>
+<span class="sourceLineNo">547</span>          }<a name="line.547"></a>
+<span class="sourceLineNo">548</span>        }<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      } catch (ExecutionException e1) {<a name="line.549"></a>
+<span class="sourceLineNo">550</span>        Throwable t = e1.getCause();<a name="line.550"></a>
+<span class="sourceLineNo">551</span>        if (t instanceof IOException) {<a name="line.551"></a>
+<span class="sourceLineNo">552</span>          LOG.error("IOException during splitting", e1);<a name="line.552"></a>
+<span class="sourceLineNo">553</span>          throw (IOException) t; // would have been thrown if not parallelized,<a name="line.553"></a>
 <span class="sourceLineNo">554</span>        }<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      } catch (ExecutionException e1) {<a name="line.555"></a>
-<span class="sourceLineNo">556</span>        Throwable t = e1.getCause();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>        if (t instanceof IOException) {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          LOG.error("IOException during splitting", e1);<a name="line.558"></a>
-<span class="sourceLineNo">559</span>          throw (IOException) t; // would have been thrown if not parallelized,<a name="line.559"></a>
-<span class="sourceLineNo">560</span>        }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        LOG.error("Unexpected execution exception during splitting", e1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        throw new IllegalStateException(t);<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      } catch (InterruptedException e1) {<a name="line.563"></a>
-<span class="sourceLineNo">564</span>        LOG.error("Unexpected interrupted exception during splitting", e1);<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw (InterruptedIOException) new InterruptedIOException().initCause(e1);<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span>    }<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    return pair;<a name="line.568"></a>
-<span class="sourceLineNo">569</span>  }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>  // unique file name for the table<a name="line.571"></a>
-<span class="sourceLineNo">572</span>  private String getUniqueName() {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    return UUID.randomUUID().toString().replaceAll("-", "");<a name="line.573"></a>
-<span class="sourceLineNo">574</span>  }<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>  private List&lt;LoadQueueItem&gt; splitStoreFile(LoadQueueItem item, TableDescriptor tableDesc,<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      byte[] splitKey) throws IOException {<a name="line.577"></a>
-<span class="sourceLineNo">578</span>    Path hfilePath = item.getFilePath();<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    byte[] family = item.getFamily();<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    Path tmpDir = hfilePath.getParent();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>    if (!tmpDir.getName().equals(TMP_DIR)) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      tmpDir = new Path(tmpDir, TMP_DIR);<a name="line.582"></a>
-<span class="sourceLineNo">583</span>    }<a name="line.583"></a>
-<span class="sourceLineNo">584</span><a name="line.584"></a>
-<span class="sourceLineNo">585</span>    LOG.info("HFile at " + hfilePath + " no longer fits inside a single " + "region. Splitting...");<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>    String uniqueName = getUniqueName();<a name="line.587"></a>
-<span class="sourceLineNo">588</span>    ColumnFamilyDescriptor familyDesc = tableDesc.getColumnFamily(family);<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>    Path botOut = new Path(tmpDir, uniqueName + ".bottom");<a name="line.590"></a>
-<span class="sourceLineNo">591</span>    Path topOut = new Path(tmpDir, uniqueName + ".top");<a name="line.591"></a>
-<span class="sourceLineNo">592</span>    splitStoreFile(getConf(), hfilePath, familyDesc, splitKey, botOut, topOut);<a name="line.592"></a>
-<span class="sourceLineNo">593</span><a name="line.593"></a>
-<span class="sourceLineNo">594</span>    FileSystem fs = tmpDir.getFileSystem(getConf());<a name="line.594"></a>
-<span class="sourceLineNo">595</span>    fs.setPermission(tmpDir, FsPermission.valueOf("-rwxrwxrwx"));<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    fs.setPermission(botOut, FsPermission.valueOf("-rwxrwxrwx"));<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    fs.setPermission(topOut, FsPermission.valueOf("-rwxrwxrwx"));<a name="line.597"></a>
+<span class="sourceLineNo">555</span>        LOG.error("Unexpected execution exception during splitting", e1);<a name="line.555"></a>
+<span class="sourceLineNo">556</span>        throw new IllegalStateException(t);<a name="line.556"></a>
+<span class="sourceLineNo">557</span>      } catch (InterruptedException e1) {<a name="line.557"></a>
+<span class="sourceLineNo">558</span>        LOG.error("Unexpected interrupted exception during splitting", e1);<a name="line.558"></a>
+<span class="sourceLineNo">559</span>        throw (InterruptedIOException) new InterruptedIOException().initCause(e1);<a name="line.559"></a>
+<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
+<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
+<span class="sourceLineNo">562</span>    return pair;<a name="line.562"></a>
+<span class="sourceLineNo">563</span>  }<a name="line.563"></a>
+<span class="sourceLineNo">564</span><a name="line.564"></a>
+<span class="sourceLineNo">565</span>  // unique file name for the table<a name="line.565"></a>
+<span class="sourceLineNo">566</span>  private String getUniqueName() {<a name="line.566"></a>
+<span class="sourceLineNo">567</span>    return UUID.randomUUID().toString().replaceAll("-", "");<a name="line.567"></a>
+<span class="sourceLineNo">568</span>  }<a name="line.568"></a>
+<span class="sourceLineNo">569</span><a name="line.569"></a>
+<span class="sourceLineNo">570</span>  private List&lt;LoadQueueItem&gt; splitStoreFile(LoadQueueItem item, TableDescriptor tableDesc,<a name="line.570"></a>
+<span class="sourceLineNo">571</span>      byte[] splitKey) throws IOException {<a name="line.571"></a>
+<span class="sourceLineNo">572</span>    Path hfilePath = item.getFilePath();<a name="line.572"></a>
+<span class="sourceLineNo">573</span>    byte[] family = item.getFamily();<a name="line.573"></a>
+<span class="sourceLineNo">574</span>    Path tmpDir = hfilePath.getParent();<a name="line.574"></a>
+<span class="sourceLineNo">575</span>    if (!tmpDir.getName().equals(TMP_DIR)) {<a name="line.575"></a>
+<span class="sourceLineNo">576</span>      tmpDir = new Path(tmpDir, TMP_DIR);<a name="line.576"></a>
+<span class="sourceLineNo">577</span>    }<a name="line.577"></a>
+<span class="sourceLineNo">578</span><a name="line.578"></a>
+<span class="sourceLineNo">579</span>    LOG.info("HFile at " + hfilePath + " no longer fits inside a single " + "region. Splitting...");<a name="line.579"></a>
+<span class="sourceLineNo">580</span><a name="line.580"></a>
+<span class="sourceLineNo">581</span>    String uniqueName = getUniqueName();<a name="line.581"></a>
+<span class="sourceLineNo">582</span>    ColumnFamilyDescriptor familyDesc = tableDesc.getColumnFamily(family);<a name="line.582"></a>
+<span class="sourceLineNo">583</span><a name="line.583"></a>
+<span class="sourceLineNo">584</span>    Path botOut = new Path(tmpDir, uniqueName + ".bottom");<a name="line.584"></a>
+<span class="sourceLineNo">585</span>    Path topOut = new Path(tmpDir, uniqueName + ".top");<a name="line.585"></a>
+<span class="sourceLineNo">586</span>    splitStoreFile(getConf(), hfilePath, familyDesc, splitKey, botOut, topOut);<a name="line.586"></a>
+<span class="sourceLineNo">587</span><a name="line.587"></a>
+<span class="sourceLineNo">588</span>    FileSystem fs = tmpDir.getFileSystem(getConf());<a name="line.588"></a>
+<span class="sourceLineNo">589</span>    fs.setPermission(tmpDir, FsPermission.valueOf("-rwxrwxrwx"));<a name="line.589"></a>
+<span class="sourceLineNo">590</span>    fs.setPermission(botOut, FsPermission.valueOf("-rwxrwxrwx"));<a name="line.590"></a>
+<span class="sourceLineNo">591</span>    fs.setPermission(topOut, FsPermission.valueOf("-rwxrwxrwx"));<a name="line.591"></a>
+<span class="sourceLineNo">592</span><a name="line.592"></a>
+<span class="sourceLineNo">593</span>    // Add these back at the *front* of the queue, so there's a lower<a name="line.593"></a>
+<span class="sourceLineNo">594</span>    // chance that the region will just split again before we get there.<a name="line.594"></a>
+<span class="sourceLineNo">595</span>    List&lt;LoadQueueItem&gt; lqis = new ArrayList&lt;&gt;(2);<a name="line.595"></a>
+<span class="sourceLineNo">596</span>    lqis.add(new LoadQueueItem(family, botOut));<a name="line.596"></a>
+<span class="sourceLineNo">597</span>    lqis.add(new LoadQueueItem(family, topOut));<a name="line.597"></a>
 <span class="sourceLineNo">598</span><a name="line.598"></a>
-<span class="sourceLineNo">599</span>    // Add these back at the *front* of the queue, so there's a lower<a name="line.599"></a>
-<span class="sourceLineNo">600</span>    // chance that the region will just split again before we get there.<a name="line.600"></a>
-<span class="sourceLineNo">601</span>    List&lt;LoadQueueItem&gt; lqis = new ArrayList&lt;&gt;(2);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    lqis.add(new LoadQueueItem(family, botOut));<a name="line.602"></a>
-<span class="sourceLineNo">603</span>    lqis.add(new LoadQueueItem(family, topOut));<a name="line.603"></a>
-<span class="sourceLineNo">604</span><a name="line.604"></a>
-<span class="sourceLineNo">605</span>    // If the current item is already the result of previous splits,<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    // we don't need it anymore. Clean up to save space.<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    // It is not part of the original input files.<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    try {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      if (tmpDir.getName().equals(TMP_DIR)) {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>        fs.delete(hfilePath, false);<a name="line.610"></a>
-<span class="sourceLineNo">611</span>      }<a name="line.611"></a>
-<span class="sourceLineNo">612</span>    } catch (IOException e) {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      LOG.warn("Unable to delete temporary split file " + hfilePath);<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    }<a name="line.614"></a>
-<span class="sourceLineNo">615</span>    LOG.info("Successfully split into new HFiles " + botOut + " and " + topOut);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    return lqis;<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  }<a name="line.617"></a>
-<span class="sourceLineNo">618</span><a name="line.618"></a>
-<span class="sourceLineNo">619</span>  /**<a name="line.619"></a>
-<span class="sourceLineNo">620</span>   * Attempt to assign the given load queue item into its target region group. If the hfile boundary<a name="line.620"></a>
-<span class="sourceLineNo">621</span>   * no longer fits into a region, physically splits the hfile such that the new bottom half will<a name="line.621"></a>
-<span class="sourceLineNo">622</span>   * fit and returns the list of LQI's corresponding to the resultant hfiles.<a name="line.622"></a>
-<span class="sourceLineNo">623</span>   * &lt;p/&gt;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>   * protected for testing<a name="line.624"></a>
-<span class="sourceLineNo">625</span>   * @throws IOException if an IO failure is encountered<a name="line.625"></a>
-<span class="sourceLineNo">626</span>   */<a name="line.626"></a>
-<span class="sourceLineNo">627</span>  @VisibleForTesting<a name="line.627"></a>
-<span class="sourceLineNo">628</span>  protected Pair&lt;List&lt;LoadQueueItem&gt;, String&gt; groupOrSplit(AsyncClusterConnection conn,<a name="line.628"></a>
-<span class="sourceLineNo">629</span>      TableName tableName, Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups, LoadQueueItem item,<a name="line.629"></a>
-<span class="sourceLineNo">630</span>      List&lt;Pair&lt;byte[], byte[]&gt;&gt; startEndKeys) throws IOException {<a name="line.630"></a>
-<span class="sourceLineNo">631</span>    Path hfilePath = item.getFilePath();<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    Optional&lt;byte[]&gt; first, last;<a name="line.632"></a>
-<span class="sourceLineNo">633</span>    try (HFile.Reader hfr = HFile.createReader(hfilePath.getFileSystem(getConf()), hfilePath,<a name="line.633"></a>
-<span class="sourceLineNo">634</span>      CacheConfig.DISABLED, true, getConf())) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      first = hfr.getFirstRowKey();<a name="line.635"></a>
-<span class="sourceLineNo">636</span>      last = hfr.getLastRowKey();<a name="line.636"></a>
-<span class="sourceLineNo">637</span>    } catch (FileNotFoundException fnfe) {<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      LOG.debug("encountered", fnfe);<a name="line.638"></a>
-<span class="sourceLineNo">639</span>      return new Pair&lt;&gt;(null, hfilePath.getName());<a name="line.639"></a>
-<span class="sourceLineNo">640</span>    }<a name="line.640"></a>
-<span class="sourceLineNo">641</span><a name="line.641"></a>
-<span class="sourceLineNo">642</span>    LOG.info("Trying to load hfile=" + hfilePath + " first=" + first.map(Bytes::toStringBinary) +<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      " last=" + last.map(Bytes::toStringBinary));<a name="line.643"></a>
-<span class="sourceLineNo">644</span>    if (!first.isPresent() || !last.isPresent()) {<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      assert !first.isPresent() &amp;&amp; !last.isPresent();<a name="line.645"></a>
-<span class="sourceLineNo">646</span>      // TODO what if this is due to a bad HFile?<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      LOG.info("hfile " + hfilePath + " has no entries, skipping");<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      return null;<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    }<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    if (Bytes.compareTo(first.get(), last.get()) &gt; 0) {<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      throw new IllegalArgumentException("Invalid range: " + Bytes.toStringBinary(first.get()) +<a name="line.651"></a>
-<span class="sourceLineNo">652</span>        " &gt; " + Bytes.toStringBinary(last.get()));<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    }<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    int idx =<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      Collections.binarySearch(startEndKeys, Pair.newPair(first.get(), HConstants.EMPTY_END_ROW),<a name="line.655"></a>
-<span class="sourceLineNo">656</span>        (p1, p2) -&gt; Bytes.compareTo(p1.getFirst(), p2.getFirst()));<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    if (idx &lt; 0) {<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      // not on boundary, returns -(insertion index). Calculate region it<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      // would be in.<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      idx = -(idx + 1) - 1;<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    }<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    int indexForCallable = idx;<a name="line.662"></a>
-<span class="sourceLineNo">663</span><a name="line.663"></a>
-<span class="sourceLineNo">664</span>    /*<a name="line.664"></a>
-<span class="sourceLineNo">665</span>     * we can consider there is a region hole in following conditions. 1) if idx &lt; 0,then first<a name="line.665"></a>
-<span class="sourceLineNo">666</span>     * region info is lost. 2) if the endkey of a region is not equal to the startkey of the next<a name="line.666"></a>
-<span class="sourceLineNo">667</span>     * region. 3) if the endkey of the last region is not empty.<a name="line.667"></a>
-<span class="sourceLineNo">668</span>     */<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    if (indexForCallable &lt; 0) {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      throw new IOException("The first region info for table " + tableName +<a name="line.670"></a>
-<span class="sourceLineNo">671</span>        " can't be found in hbase:meta.Please use hbck tool to fix it first.");<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    } else if ((indexForCallable == startEndKeys.size() - 1) &amp;&amp;<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      !Bytes.equals(startEndKeys.get(indexForCallable).getSecond(), HConstants.EMPTY_BYTE_ARRAY)) {<a name="line.673"></a>
-<span class="sourceLineNo">674</span>      throw new IOException("The last region info for table " + tableName +<a name="line.674"></a>
-<span class="sourceLineNo">675</span>        " can't be found in hbase:meta.Please use hbck tool to fix it first.");<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    } else if (indexForCallable + 1 &lt; startEndKeys.size() &amp;&amp;<a name="line.676"></a>
-<span class="sourceLineNo">677</span>      !(Bytes.compareTo(startEndKeys.get(indexForCallable).getSecond(),<a name="line.677"></a>
-<span class="sourceLineNo">678</span>        startEndKeys.get(indexForCallable + 1).getFirst()) == 0)) {<a name="line.678"></a>
-<span class="sourceLineNo">679</span>      throw new IOException("The endkey of one region for table " + tableName +<a name="line.679"></a>
-<span class="sourceLineNo">680</span>        " is not equal to the startkey of the next region in hbase:meta." +<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        "Please use hbck tool to fix it first.");<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    }<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>    boolean lastKeyInRange = Bytes.compareTo(last.get(), startEndKeys.get(idx).getSecond()) &lt; 0 ||<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      Bytes.equals(startEndKeys.get(idx).getSecond(), HConstants.EMPTY_BYTE_ARRAY);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    if (!lastKeyInRange) {<a name="line.686"></a>
-<span class="sourceLineNo">687</span>      Pair&lt;byte[], byte[]&gt; startEndKey = startEndKeys.get(indexForCallable);<a name="line.687"></a>
-<span class="sourceLineNo">688</span>      List&lt;LoadQueueItem&gt; lqis =<a name="line.688"></a>
-<span class="sourceLineNo">689</span>        splitStoreFile(item, FutureUtils.get(conn.getAdmin().getDescriptor(tableName)),<a name="line.689"></a>
-<span class="sourceLineNo">690</span>            startEndKey.getSecond());<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      return new Pair&lt;&gt;(lqis, null);<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    }<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>    // group regions.<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    regionGroups.put(ByteBuffer.wrap(startEndKeys.get(idx).getFirst()), item);<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    return null;<a name="line.696"></a>
-<span class="sourceLineNo">697</span>  }<a name="line.697"></a>
-<span class="sourceLineNo">698</span><a name="line.698"></a>
-<span class="sourceLineNo">699</span>  /**<a name="line.699"></a>
-<span class="sourceLineNo">700</span>   * Split a storefile into a top and bottom half, maintaining the metadata, recreating bloom<a name="line.700"></a>
-<span class="sourceLineNo">701</span>   * filters, etc.<a name="line.701"></a>
-<span class="sourceLineNo">702</span>   */<a name="line.702"></a>
-<span class="sourceLineNo">703</span>  @VisibleForTesting<a name="line.703"></a>
-<span class="sourceLineNo">704</span>  static void splitStoreFile(Configuration conf, Path inFile, ColumnFamilyDescriptor familyDesc,<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      byte[] splitKey, Path bottomOut, Path topOut) throws IOException {<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    // Open reader with no block cache, and not in-memory<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    Reference topReference = Reference.createTopReference(splitKey);<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    Reference bottomReference = Reference.createBottomReference(splitKey);<a name="line.708"></a>
-<span class="sourceLineNo">709</span><a name="line.709"></a>
-<span class="sourceLineNo">710</span>    copyHFileHalf(conf, inFile, topOut, topReference, familyDesc);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    copyHFileHalf(conf, inFile, bottomOut, bottomReference, familyDesc);<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  /**<a name="line.714"></a>
-<span class="sourceLineNo">715</span>   * Copy half of an HFile into a new HFile.<a name="line.715"></a>
-<span class="sourceLineNo">716</span>   */<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  private static void copyHFileHalf(Configuration conf, Path inFile, Path outFile,<a name="line.717"></a>
-<span class="sourceLineNo">718</span>      Reference reference, ColumnFamilyDescriptor familyDescriptor) throws IOException {<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    FileSystem fs = inFile.getFileSystem(conf);<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    CacheConfig cacheConf = CacheConfig.DISABLED;<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    HalfStoreFileReader halfReader = null;<a name="line.721"></a>
-<span class="sourceLineNo">722</span>    StoreFileWriter halfWriter = null;<a name="line.722"></a>
-<span class="sourceLineNo">723</span>    try {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>      ReaderContext context = new ReaderContextBuilder()<a name="line.724"></a>
-<span class="sourceLineNo">725</span>          .withFileSystemAndPath(fs, inFile).build();<a name="line.725"></a>
-<span class="sourceLineNo">726</span>      HFileInfo hfile = new HFileInfo(context, conf);<a name="line.726"></a>
-<span class="sourceLineNo">727</span>      halfReader = new HalfStoreFileReader(context, hfile, cacheConf, reference,<a name="line.727"></a>
-<span class="sourceLineNo">728</span>        new AtomicInteger(0), conf);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>      hfile.initMetaAndIndex(halfReader.getHFileReader());<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      Map&lt;byte[], byte[]&gt; fileInfo = halfReader.loadFileInfo();<a name="line.730"></a>
-<span class="sourceLineNo">731</span><a name="line.731"></a>
-<span class="sourceLineNo">732</span>      int blocksize = familyDescriptor.getBlocksize();<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      Algorithm compression = familyDescriptor.getCompressionType();<a name="line.733"></a>
-<span class="sourceLineNo">734</span>      BloomType bloomFilterType = familyDescriptor.getBloomFilterType();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>      HFileContext hFileContext = new HFileContextBuilder().withCompression(compression)<a name="line.735"></a>
-<span class="sourceLineNo">736</span>        .withChecksumType(HStore.getChecksumType(conf))<a name="line.736"></a>
-<span class="sourceLineNo">737</span>        .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf)).withBlockSize(blocksize)<a name="line.737"></a>
-<span class="sourceLineNo">738</span>        .withDataBlockEncoding(familyDescriptor.getDataBlockEncoding()).withIncludesTags(true)<a name="line.738"></a>
-<span class="sourceLineNo">739</span>        .build();<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      halfWriter = new StoreFileWriter.Builder(conf, cacheConf, fs).withFilePath(outFile)<a name="line.740"></a>
-<span class="sourceLineNo">741</span>        .withBloomType(bloomFilterType).withFileContext(hFileContext).build();<a name="line.741"></a>
-<span class="sourceLineNo">742</span>      HFileScanner scanner = halfReader.getScanner(false, false, false);<a name="line.742"></a>
-<span class="sourceLineNo">743</span>      scanner.seekTo();<a name="line.743"></a>
-<span class="sourceLineNo">744</span>      do {<a name="line.744"></a>
-<span class="sourceLineNo">745</span>        halfWriter.append(scanner.getCell());<a name="line.745"></a>
-<span class="sourceLineNo">746</span>      } while (scanner.next());<a name="line.746"></a>
-<span class="sourceLineNo">747</span><a name="line.747"></a>
-<span class="sourceLineNo">748</span>      for (Map.Entry&lt;byte[], byte[]&gt; entry : fileInfo.entrySet()) {<a name="line.748"></a>
-<span class="sourceLineNo">749</span>        if (shouldCopyHFileMetaKey(entry.getKey())) {<a name="line.749"></a>
-<span class="sourceLineNo">750</span>          halfWriter.appendFileInfo(entry.getKey(), entry.getValue());<a name="line.750"></a>
-<span class="sourceLineNo">751</span>        }<a name="line.751"></a>
-<span class="sourceLineNo">752</span>      }<a name="line.752"></a>
-<span class="sourceLineNo">753</span>    } finally {<a name="line.753"></a>
-<span class="sourceLineNo">754</span>      if (halfReader != null) {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>        try {<a name="line.755"></a>
-<span class="sourceLineNo">756</span>          halfReader.close(cacheConf.shouldEvictOnClose());<a name="line.756"></a>
-<span class="sourceLineNo">757</span>        } catch (IOException e) {<a name="line.757"></a>
-<span class="sourceLineNo">758</span>          LOG.warn("failed to close hfile reader for " + inFile, e);<a name="line.758"></a>
-<span class="sourceLineNo">759</span>        }<a name="line.759"></a>
-<span class="sourceLineNo">760</span>      }<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      if (halfWriter != null) {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>        halfWriter.close();<a name="line.762"></a>
-<span class="sourceLineNo">763</span>      }<a name="line.763"></a>
-<span class="sourceLineNo">764</span>    }<a name="line.764"></a>
-<span class="sourceLineNo">765</span>  }<a name="line.765"></a>
-<span class="sourceLineNo">766</span><a name="line.766"></a>
-<span class="sourceLineNo">767</span>  /**<a name="line.767"></a>
-<span class="sourceLineNo">768</span>   * Infers region boundaries for a new table.<a name="line.768"></a>
-<span class="sourceLineNo">769</span>   * &lt;p/&gt;<a name="line.769"></a>
-<span class="sourceLineNo">770</span>   * Parameter: &lt;br/&gt;<a name="line.770"></a>
-<span class="sourceLineNo">771</span>   * bdryMap is a map between keys to an integer belonging to {+1, -1}<a name="line.771"></a>
-<span class="sourceLineNo">772</span>   * &lt;ul&gt;<a name="line.772"></a>
-<span class="sourceLineNo">773</span>   * &lt;li&gt;If a key is a start key of a file, then it maps to +1&lt;/li&gt;<a name="line.773"></a>
-<span class="sourceLineNo">774</span>   * &lt;li&gt;If a key is an end key of a file, then it maps to -1&lt;/li&gt;<a name="line.774"></a>
-<span class="sourceLineNo">775</span>   * &lt;/ul&gt;<a name="line.775"></a>
-<span class="sourceLineNo">776</span>   * &lt;p&gt;<a name="line.776"></a>
-<span class="sourceLineNo">777</span>   * Algo:&lt;br/&gt;<a name="line.777"></a>
-<span class="sourceLineNo">778</span>   * &lt;ol&gt;<a name="line.778"></a>
-<span class="sourceLineNo">779</span>   * &lt;li&gt;Poll on the keys in order:<a name="line.779"></a>
-<span class="sourceLineNo">780</span>   * &lt;ol type="a"&gt;<a name="line.780"></a>
-<span class="sourceLineNo">781</span>   * &lt;li&gt;Keep adding the mapped values to these keys (runningSum)&lt;/li&gt;<a name="line.781"></a>
-<span class="sourceLineNo">782</span>   * &lt;li&gt;Each time runningSum reaches 0, add the start Key from when the runningSum had started to a<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * boundary list.&lt;/li&gt;<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   * &lt;/ol&gt;<a name="line.784"></a>
-<span class="sourceLineNo">785</span>   * &lt;/li&gt;<a name="line.785"></a>
-<span class="sourceLineNo">786</span>   * &lt;li&gt;Return the boundary list.&lt;/li&gt;<a name="line.786"></a>
-<span class="sourceLineNo">787</span>   * &lt;/ol&gt;<a name="line.787"></a>
-<span class="sourceLineNo">788</span>   */<a name="line.788"></a>
-<span class="sourceLineNo">789</span>  public static byte[][] inferBoundaries(SortedMap&lt;byte[], Integer&gt; bdryMap) {<a name="line.789"></a>
-<span class="sourceLineNo">790</span>    List&lt;byte[]&gt; keysArray = new ArrayList&lt;&gt;();<a name="line.790"></a>
-<span class="sourceLineNo">791</span>    int runningValue = 0;<a name="line.791"></a>
-<span class="sourceLineNo">792</span>    byte[] currStartKey = null;<a name="line.792"></a>
-<span class="sourceLineNo">793</span>    boolean firstBoundary = true;<a name="line.793"></a>
-<span class="sourceLineNo">794</span><a name="line.794"></a>
-<span class="sourceLineNo">795</span>    for (Map.Entry&lt;byte[], Integer&gt; item : bdryMap.entrySet()) {<a name="line.795"></a>
-<span class="sourceLineNo">796</span>      if (runningValue == 0) {<a name="line.796"></a>
-<span class="sourceLineNo">797</span>        currStartKey = item.getKey();<a name="line.797"></a>
-<span class="sourceLineNo">798</span>      }<a name="line.798"></a>
-<span class="sourceLineNo">799</span>      runningValue += item.getValue();<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      if (runningValue == 0) {<a name="line.800"></a>
-<span class="sourceLineNo">801</span>        if (!firstBoundary) {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>          keysArray.add(currStartKey);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>        }<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        firstBoundary = false;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>      }<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    }<a name="line.806"></a>
-<span class="sourceLineNo">807</span><a name="line.807"></a>
-<span class="sourceLineNo">808</span>    return keysArray.toArray(new byte[0][]);<a name="line.808"></a>
-<span class="sourceLineNo">809</span>  }<a name="line.809"></a>
-<span class="sourceLineNo">810</span><a name="line.810"></a>
-<span class="sourceLineNo">811</span>  /**<a name="line.811"></a>
-<span class="sourceLineNo">812</span>   * If the table is created for the first time, then "completebulkload" reads the files twice. More<a name="line.812"></a>
-<span class="sourceLineNo">813</span>   * modifications necessary if we want to avoid doing it.<a name="line.813"></a>
-<span class="sourceLineNo">814</span>   */<a name="line.814"></a>
-<span class="sourceLineNo">815</span>  private void createTable(TableName tableName, Path hfofDir, AsyncAdmin admin) throws IOException {<a name="line.815"></a>
-<span class="sourceLineNo">816</span>    final FileSystem fs = hfofDir.getFileSystem(getConf());<a name="line.816"></a>
-<span class="sourceLineNo">817</span><a name="line.817"></a>
-<span class="sourceLineNo">818</span>    // Add column families<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    // Build a set of keys<a name="line.819"></a>
-<span class="sourceLineNo">820</span>    List&lt;ColumnFamilyDescriptorBuilder&gt; familyBuilders = new ArrayList&lt;&gt;();<a name="line.820"></a>
-<span class="sourceLineNo">821</span>    SortedMap&lt;byte[], Integer&gt; map = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.821"></a>
-<span class="sourceLineNo">822</span>    visitBulkHFiles(fs, hfofDir, new BulkHFileVisitor&lt;ColumnFamilyDescriptorBuilder&gt;() {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>      @Override<a name="line.823"></a>
-<span class="sourceLineNo">824</span>      public ColumnFamilyDescriptorBuilder bulkFamily(byte[] familyName) {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>        ColumnFamilyDescriptorBuilder builder =<a name="line.825"></a>
-<span class="sourceLineNo">826</span>          ColumnFamilyDescriptorBuilder.newBuilder(familyName);<a name="line.826"></a>
-<span class="sourceLineNo">827</span>        familyBuilders.add(builder);<a name="line.827"></a>
-<span class="sourceLineNo">828</span>        return builder;<a name="line.828"></a>
-<span class="sourceLineNo">829</span>      }<a name="line.829"></a>
-<span class="sourceLineNo">830</span><a name="line.830"></a>
-<span class="sourceLineNo">831</span>      @Override<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      public void bulkHFile(ColumnFamilyDescriptorBuilder builder, FileStatus hfileStatus)<a name="line.832"></a>
-<span class="sourceLineNo">833</span>          throws IOException {<a name="line.833"></a>
-<span class="sourceLineNo">834</span>        Path hfile = hfileStatus.getPath();<a name="line.834"></a>
-<span class="sourceLineNo">835</span>        try (HFile.Reader reader =<a name="line.835"></a>
-<span class="sourceLineNo">836</span>          HFile.createReader(fs, hfile, CacheConfig.DISABLED, true, getConf())) {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>          if (builder.getCompressionType() != reader.getFileContext().getCompression()) {<a name="line.837"></a>
-<span class="sourceLineNo">838</span>            builder.setCompressionType(reader.getFileContext().getCompression());<a name="line.838"></a>
-<span class="sourceLineNo">839</span>            LOG.info("Setting compression " + reader.getFileContext().getCompression().name() +<a name="line.839"></a>
-<span class="sourceLineNo">840</span>              " for family " + builder.getNameAsString());<a name="line.840"></a>
-<span class="sourceLineNo">841</span>          }<a name="line.841"></a>
-<span class="sourceLineNo">842</span>          byte[] first = reader.getFirstRowKey().get();<a name="line.842"></a>
-<span class="sourceLineNo">843</span>          byte[] last = reader.getLastRowKey().get();<a name="line.843"></a>
-<span class="sourceLineNo">844</span><a name="line.844"></a>
-<span class="sourceLineNo">845</span>          LOG.info("Trying to figure out region boundaries hfile=" + hfile + " first=" +<a name="line.845"></a>
-<span class="sourceLineNo">846</span>            Bytes.toStringBinary(first) + " last=" + Bytes.toStringBinary(last));<a name="line.846"></a>
-<span class="sourceLineNo">847</span><a name="line.847"></a>
-<span class="sourceLineNo">848</span>          // To eventually infer start key-end key boundaries<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          Integer value = map.getOrDefault(first, 0);<a name="line.849"></a>
-<span class="sourceLineNo">850</span>          map.put(first, value + 1);<a name="line.850"></a>
+<span class="sourceLineNo">599</span>    // If the current item is already the result of previous splits,<a name="line.599"></a>
+<span class="sourceLineNo">600</span>    // we don't need it anymore. Clean up to save space.<a name="line.600"></a>
+<span class="sourceLineNo">601</span>    // It is not part of the original input files.<a name="line.601"></a>
+<span class="sourceLineNo">602</span>    try {<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      if (tmpDir.getName().equals(TMP_DIR)) {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>        fs.delete(hfilePath, false);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    } catch (IOException e) {<a name="line.606"></a>
+<span class="sourceLineNo">607</span>      LOG.warn("Unable to delete temporary split file " + hfilePath);<a name="line.607"></a>
+<span class="sourceLineNo">608</span>    }<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    LOG.info("Successfully split into new HFiles " + botOut + " and " + topOut);<a name="line.609"></a>
+<span class="sourceLineNo">610</span>    return lqis;<a name="line.610"></a>
+<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
+<span class="sourceLineNo">612</span><a name="line.612"></a>
+<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
+<span class="sourceLineNo">614</span>   * Attempt to assign the given load queue item into its target region group. If the hfile boundary<a name="line.614"></a>
+<span class="sourceLineNo">615</span>   * no longer fits into a region, physically splits the hfile such that the new bottom half will<a name="line.615"></a>
+<span class="sourceLineNo">616</span>   * fit and returns the list of LQI's corresponding to the resultant hfiles.<a name="line.616"></a>
+<span class="sourceLineNo">617</span>   * &lt;p/&gt;<a name="line.617"></a>
+<span class="sourceLineNo">618</span>   * protected for testing<a name="line.618"></a>
+<span class="sourceLineNo">619</span>   * @throws IOException if an IO failure is encountered<a name="line.619"></a>
+<span class="sourceLineNo">620</span>   */<a name="line.620"></a>
+<span class="sourceLineNo">621</span>  @VisibleForTesting<a name="line.621"></a>
+<span class="sourceLineNo">622</span>  protected Pair&lt;List&lt;LoadQueueItem&gt;, String&gt; groupOrSplit(AsyncClusterConnection conn,<a name="line.622"></a>
+<span class="sourceLineNo">623</span>      TableName tableName, Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups, LoadQueueItem item,<a name="line.623"></a>
+<span class="sourceLineNo">624</span>      List&lt;Pair&lt;byte[], byte[]&gt;&gt; startEndKeys) throws IOException {<a name="line.624"></a>
+<span class="sourceLineNo">625</span>    Path hfilePath = item.getFilePath();<a name="line.625"></a>
+<span class="sourceLineNo">626</span>    Optional&lt;byte[]&gt; first, last;<a name="line.626"></a>
+<span class="sourceLineNo">627</span>    try (HFile.Reader hfr = HFile.createReader(hfilePath.getFileSystem(getConf()), hfilePath,<a name="line.627"></a>
+<span class="sourceLineNo">628</span>      CacheConfig.DISABLED, true, getConf())) {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>      first = hfr.getFirstRowKey();<a name="line.629"></a>
+<span class="sourceLineNo">630</span>      last = hfr.getLastRowKey();<a name="line.630"></a>
+<span class="sourceLineNo">631</span>    } catch (FileNotFoundException fnfe) {<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      LOG.debug("encountered", fnfe);<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      return new Pair&lt;&gt;(null, hfilePath.getName());<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
+<span class="sourceLineNo">635</span><a name="line.635"></a>
+<span class="sourceLineNo">636</span>    LOG.info("Trying to load hfile=" + hfilePath + " first=" + first.map(Bytes::toStringBinary) +<a name="line.636"></a>
+<span class="sourceLineNo">637</span>      " last=" + last.map(Bytes::toStringBinary));<a name="line.637"></a>
+<span class="sourceLineNo">638</span>    if (!first.isPresent() || !last.isPresent()) {<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      assert !first.isPresent() &amp;&amp; !last.isPresent();<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      // TODO what if this is due to a bad HFile?<a name="line.640"></a>
+<span class="sourceLineNo">641</span>      LOG.info("hfile " + hfilePath + " has no entries, skipping");<a name="line.641"></a>
+<span class="sourceLineNo">642</span>      return null;<a name="line.642"></a>
+<span class="sourceLineNo">643</span>    }<a name="line.643"></a>
+<span class="sourceLineNo">644</span>    if (Bytes.compareTo(first.get(), last.get()) &gt; 0) {<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      throw new IllegalArgumentException("Invalid range: " + Bytes.toStringBinary(first.get()) +<a name="line.645"></a>
+<span class="sourceLineNo">646</span>        " &gt; " + Bytes.toStringBinary(last.get()));<a name="line.646"></a>
+<span class="sourceLineNo">647</span>    }<a name="line.647"></a>
+<span class="sourceLineNo">648</span>    int idx =<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      Collections.binarySearch(startEndKeys, Pair.newPair(first.get(), HConstants.EMPTY_END_ROW),<a name="line.649"></a>
+<span class="sourceLineNo">650</span>        (p1, p2) -&gt; Bytes.compareTo(p1.getFirst(), p2.getFirst()));<a name="line.650"></a>
+<span class="sourceLineNo">651</span>    if (idx &lt; 0) {<a name="line.651"></a>
+<span class="sourceLineNo">652</span>      // not on boundary, returns -(insertion index). Calculate region it<a name="line.652"></a>
+<span class="sourceLineNo">653</span>      // would be in.<a name="line.653"></a>
+<span class="sourceLineNo">654</span>      idx = -(idx + 1) - 1;<a name="line.654"></a>
+<span class="sourceLineNo">655</span>    }<a name="line.655"></a>
+<span class="sourceLineNo">656</span>    int indexForCallable = idx;<a name="line.656"></a>
+<span class="sourceLineNo">657</span><a name="line.657"></a>
+<span class="sourceLineNo">658</span>    /*<a name="line.658"></a>
+<span class="sourceLineNo">659</span>     * we can consider there is a region hole in following conditions. 1) if idx &lt; 0,then first<a name="line.659"></a>
+<span class="sourceLineNo">660</span>     * region info is lost. 2) if the endkey of a region is not equal to the startkey of the next<a name="line.660"></a>
+<span class="sourceLineNo">661</span>     * region. 3) if the endkey of the last region is not empty.<a name="line.661"></a>
+<span class="sourceLineNo">662</span>     */<a name="line.662"></a>
+<span class="sourceLineNo">663</span>    if (indexForCallable &lt; 0) {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>      throw new IOException("The first region info for table " + tableName +<a name="line.664"></a>
+<span class="sourceLineNo">665</span>        " can't be found in hbase:meta.Please use hbck tool to fix it first.");<a name="line.665"></a>
+<span class="sourceLineNo">666</span>    } else if ((indexForCallable == startEndKeys.size() - 1) &amp;&amp;<a name="line.666"></a>
+<span class="sourceLineNo">667</span>      !Bytes.equals(startEndKeys.get(indexForCallable).getSecond(), HConstants.EMPTY_BYTE_ARRAY)) {<a name="line.667"></a>
+<span class="sourceLineNo">668</span>      throw new IOException("The last region info for table " + tableName +<a name="line.668"></a>
+<span class="sourceLineNo">669</span>        " can't be found in hbase:meta.Please use hbck tool to fix it first.");<a name="line.669"></a>
+<span class="sourceLineNo">670</span>    } else if (indexForCallable + 1 &lt; startEndKeys.size() &amp;&amp;<a name="line.670"></a>
+<span class="sourceLineNo">671</span>      !(Bytes.compareTo(startEndKeys.get(indexForCallable).getSecond(),<a name="line.671"></a>
+<span class="sourceLineNo">672</span>        startEndKeys.get(indexForCallable + 1).getFirst()) == 0)) {<a name="line.672"></a>
+<span class="sourceLineNo">673</span>      throw new IOException("The endkey of one region for table " + tableName +<a name="line.673"></a>
+<span class="sourceLineNo">674</span>        " is not equal to the startkey of the next region in hbase:meta." +<a name="line.674"></a>
+<span class="sourceLineNo">675</span>        "Please use hbck tool to fix it first.");<a name="line.675"></a>
+<span class="sourceLineNo">676</span>    }<a name="line.676"></a>
+<span class="sourceLineNo">677</span><a name="line.677"></a>
+<span class="sourceLineNo">678</span>    boolean lastKeyInRange = Bytes.compareTo(last.get(), startEndKeys.get(idx).getSecond()) &lt; 0 ||<a name="line.678"></a>
+<span class="sourceLineNo">679</span>      Bytes.equals(startEndKeys.get(idx).getSecond(), HConstants.EMPTY_BYTE_ARRAY);<a name="line.679"></a>
+<span class="sourceLineNo">680</span>    if (!lastKeyInRange) {<a name="line.680"></a>
+<span class="sourceLineNo">681</span>      Pair&lt;byte[], byte[]&gt; startEndKey = startEndKeys.get(indexForCallable);<a name="line.681"></a>
+<span class="sourceLineNo">682</span>      List&lt;LoadQueueItem&gt; lqis =<a name="line.682"></a>
+<span class="sourceLineNo">683</span>        splitStoreFile(item, FutureUtils.get(conn.getAdmin().getDescriptor(tableName)),<a name="line.683"></a>
+<span class="sourceLineNo">684</span>            startEndKey.getSecond());<a name="line.684"></a>
+<span class="sourceLineNo">685</span>      return new Pair&lt;&gt;(lqis, null);<a name="line.685"></a>
+<span class="sourceLineNo">686</span>    }<a name="line.686"></a>
+<span class="sourceLineNo">687</span><a name="line.687"></a>
+<span class="sourceLineNo">688</span>    // group regions.<a name="line.688"></a>
+<span class="sourceLineNo">689</span>    regionGroups.put(ByteBuffer.wrap(startEndKeys.get(idx).getFirst()), item);<a name="line.689"></a>
+<span class="sourceLineNo">690</span>    return null;<a name="line.690"></a>
+<span class="sourceLineNo">691</span>  }<a name="line.691"></a>
+<span class="sourceLineNo">692</span><a name="line.692"></a>
+<span class="sourceLineNo">693</span>  /**<a name="line.693"></a>
+<span class="sourceLineNo">694</span>   * Split a storefile into a top and bottom half, maintaining the metadata, recreating bloom<a name="line.694"></a>
+<span class="sourceLineNo">695</span>   * filters, etc.<a name="line.695"></a>
+<span class="sourceLineNo">696</span>   */<a name="line.696"></a>
+<span class="sourceLineNo">697</span>  @VisibleForTesting<a name="line.697"></a>
+<span class="sourceLineNo">698</span>  static void splitStoreFile(Configuration conf, Path inFile, ColumnFamilyDescriptor familyDesc,<a name="line.698"></a>
+<span class="sourceLineNo">699</span>      byte[] splitKey, Path bottomOut, Path topOut) throws IOException {<a name="line.699"></a>
+<span class="sourceLineNo">700</span>    // Open reader with no block cache, and not in-memory<a name="line.700"></a>
+<span class="sourceLineNo">701</span>    Reference topReference = Reference.createTopReference(splitKey);<a name="line.701"></a>
+<span class="sourceLineNo">702</span>    Reference bottomReference = Reference.createBottomReference(splitKey);<a name="line.702"></a>
+<span class="sourceLineNo">703</span><a name="line.703"></a>
+<span class="sourceLineNo">704</span>    copyHFileHalf(conf, inFile, topOut, topReference, familyDesc);<a name="line.704"></a>
+<span class="sourceLineNo">705</span>    copyHFileHalf(conf, inFile, bottomOut, bottomReference, familyDesc);<a name="line.705"></a>
+<span class="sourceLineNo">706</span>  }<a name="line.706"></a>
+<span class="sourceLineNo">707</span><a name="line.707"></a>
+<span class="sourceLineNo">708</span>  /**<a name="line.708"></a>
+<span class="sourceLineNo">709</span>   * Copy half of an HFile into a new HFile.<a name="line.709"></a>
+<span class="sourceLineNo">710</span>   */<a name="line.710"></a>
+<span class="sourceLineNo">711</span>  private static void copyHFileHalf(Configuration conf, Path inFile, Path outFile,<a name="line.711"></a>
+<span class="sourceLineNo">712</span>      Reference reference, ColumnFamilyDescriptor familyDescriptor) throws IOException {<a name="line.712"></a>
+<span class="sourceLineNo">713</span>    FileSystem fs = inFile.getFileSystem(conf);<a name="line.713"></a>
+<span class="sourceLineNo">714</span>    CacheConfig cacheConf = CacheConfig.DISABLED;<a name="line.714"></a>
+<span class="sourceLineNo">715</span>    HalfStoreFileReader halfReader = null;<a name="line.715"></a>
+<span class="sourceLineNo">716</span>    StoreFileWriter halfWriter = null;<a name="line.716"></a>
+<span class="sourceLineNo">717</span>    try {<a name="line.717"></a>
+<span class="sourceLineNo">718</span>      ReaderContext context = new ReaderContextBuilder()<a name="line.718"></a>
+<span class="sourceLineNo">719</span>          .withFileSystemAndPath(fs, inFile).build();<a name="line.719"></a>
+<span class="sourceLineNo">720</span>      HFileInfo hfile = new HFileInfo(context, conf);<a name="line.720"></a>
+<span class="sourceLineNo">721</span>      halfReader = new HalfStoreFileReader(context, hfile, cacheConf, reference,<a name="line.721"></a>
+<span class="sourceLineNo">722</span>        new AtomicInteger(0), conf);<a name="line.722"></a>
+<span class="sourceLineNo">723</span>      hfile.initMetaAndIndex(halfReader.getHFileReader());<a name="line.723"></a>
+<span class="sourceLineNo">724</span>      Map&lt;byte[], byte[]&gt; fileInfo = halfReader.loadFileInfo();<a name="line.724"></a>
+<span class="sourceLineNo">725</span><a name="line.725"></a>
+<span class="sourceLineNo">726</span>      int blocksize = familyDescriptor.getBlocksize();<a name="line.726"></a>
+<span class="sourceLineNo">727</span>      Algorithm compression = familyDescriptor.getCompressionType();<a name="line.727"></a>
+<span class="sourceLineNo">728</span>      BloomType bloomFilterType = familyDescriptor.getBloomFilterType();<a name="line.728"></a>
+<span class="sourceLineNo">729</span>      HFileContext hFileContext = new HFileContextBuilder().withCompression(compression)<a name="line.729"></a>
+<span class="sourceLineNo">730</span>        .withChecksumType(HStore.getChecksumType(conf))<a name="line.730"></a>
+<span class="sourceLineNo">731</span>        .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf)).withBlockSize(blocksize)<a name="line.731"></a>
+<span class="sourceLineNo">732</span>        .withDataBlockEncoding(familyDescriptor.getDataBlockEncoding()).withIncludesTags(true)<a name="line.732"></a>
+<span class="sourceLineNo">733</span>        .build();<a name="line.733"></a>
+<span class="sourceLineNo">734</span>      halfWriter = new StoreFileWriter.Builder(conf, cacheConf, fs).withFilePath(outFile)<a name="line.734"></a>
+<span class="sourceLineNo">735</span>        .withBloomType(bloomFilterType).withFileContext(hFileContext).build();<a name="line.735"></a>
+<span class="sourceLineNo">736</span>      HFileScanner scanner = halfReader.getScanner(false, false, false);<a name="line.736"></a>
+<span class="sourceLineNo">737</span>      scanner.seekTo();<a name="line.737"></a>
+<span class="sourceLineNo">738</span>      do {<a name="line.738"></a>
+<span class="sourceLineNo">739</span>        halfWriter.append(scanner.getCell());<a name="line.739"></a>
+<span class="sourceLineNo">740</span>      } while (scanner.next());<a name="line.740"></a>
+<span class="sourceLineNo">741</span><a name="line.741"></a>
+<span class="sourceLineNo">742</span>      for (Map.Entry&lt;byte[], byte[]&gt; entry : fileInfo.entrySet()) {<a name="line.742"></a>
+<span class="sourceLineNo">743</span>        if (shouldCopyHFileMetaKey(entry.getKey())) {<a name="line.743"></a>
+<span class="sourceLineNo">744</span>          halfWriter.appendFileInfo(entry.getKey(), entry.getValue());<a name="line.744"></a>
+<span class="sourceLineNo">745</span>        }<a name="line.745"></a>
+<span class="sourceLineNo">746</span>      }<a name="line.746"></a>
+<span class="sourceLineNo">747</span>    } finally {<a name="line.747"></a>
+<span class="sourceLineNo">748</span>      if (halfReader != null) {<a name="line.748"></a>
+<span class="sourceLineNo">749</span>        try {<a name="line.749"></a>
+<span class="sourceLineNo">750</span>          halfReader.close(cacheConf.shouldEvictOnClose());<a name="line.750"></a>
+<span class="sourceLineNo">751</span>        } catch (IOException e) {<a name="line.751"></a>
+<span class="sourceLineNo">752</span>          LOG.warn("failed to close hfile reader for " + inFile, e);<a name="line.752"></a>
+<span class="sourceLineNo">753</span>        }<a name="line.753"></a>
+<span class="sourceLineNo">754</span>      }<a name="line.754"></a>
+<span class="sourceLineNo">755</span>      if (halfWriter != null) {<a name="line.755"></a>
+<span class="sourceLineNo">756</span>        halfWriter.close();<a name="line.756"></a>
+<span class="sourceLineNo">757</span>      }<a name="line.757"></a>
+<span class="sourceLineNo">758</span>    }<a name="line.758"></a>
+<span class="sourceLineNo">759</span>  }<a name="line.759"></a>
+<span class="sourceLineNo">760</span><a name="line.760"></a>
+<span class="sourceLineNo">761</span>  /**<a name="line.761"></a>
+<span class="sourceLineNo">762</span>   * Infers region boundaries for a new table.<a name="line.762"></a>
+<span class="sourceLineNo">763</span>   * &lt;p/&gt;<a name="line.763"></a>
+<span class="sourceLineNo">764</span>   * Parameter: &lt;br/&gt;<a name="line.764"></a>
+<span class="sourceLineNo">765</span>   * bdryMap is a map between keys to an integer belonging to {+1, -1}<a name="line.765"></a>
+<span class="sourceLineNo">766</span>   * &lt;ul&gt;<a name="line.766"></a>
+<span class="sourceLineNo">767</span>   * &lt;li&gt;If a key is a start key of a file, then it maps to +1&lt;/li&gt;<a name="line.767"></a>
+<span class="sourceLineNo">768</span>   * &lt;li&gt;If a key is an end key of a file, then it maps to -1&lt;/li&gt;<a name="line.768"></a>
+<span class="sourceLineNo">769</span>   * &lt;/ul&gt;<a name="line.769"></a>
+<span class="sourceLineNo">770</span>   * &lt;p&gt;<a name="line.770"></a>
+<span class="sourceLineNo">771</span>   * Algo:&lt;br/&gt;<a name="line.771"></a>
+<span class="sourceLineNo">772</span>   * &lt;ol&gt;<a name="line.772"></a>
+<span class="sourceLineNo">773</span>   * &lt;li&gt;Poll on the keys in order:<a name="line.773"></a>
+<span class="sourceLineNo">774</span>   * &lt;ol type="a"&gt;<a name="line.774"></a>
+<span class="sourceLineNo">775</span>   * &lt;li&gt;Keep adding the mapped values to these keys (runningSum)&lt;/li&gt;<a name="line.775"></a>
+<span class="sourceLineNo">776</span>   * &lt;li&gt;Each time runningSum reaches 0, add the start Key from when the runningSum had started to a<a name="line.776"></a>
+<span class="sourceLineNo">777</span>   * boundary list.&lt;/li&gt;<a name="line.777"></a>
+<span class="sourceLineNo">778</span>   * &lt;/ol&gt;<a name="line.778"></a>
+<span class="sourceLineNo">779</span>   * &lt;/li&gt;<a name="line.779"></a>
+<span class="sourceLineNo">780</span>   * &lt;li&gt;Return the boundary list.&lt;/li&gt;<a name="line.780"></a>
+<span class="sourceLineNo">781</span>   * &lt;/ol&gt;<a name="line.781"></a>
+<span class="sourceLineNo">782</span>   */<a name="line.782"></a>
+<span class="sourceLineNo">783</span>  public static byte[][] inferBoundaries(SortedMap&lt;byte[], Integer&gt; bdryMap) {<a name="line.783"></a>
+<span class="sourceLineNo">784</span>    List&lt;byte[]&gt; keysArray = new ArrayList&lt;&gt;();<a name="line.784"></a>
+<span class="sourceLineNo">785</span>    int runningValue = 0;<a name="line.785"></a>
+<span class="sourceLineNo">786</span>    byte[] currStartKey = null;<a name="line.786"></a>
+<span class="sourceLineNo">787</span>    boolean firstBoundary = true;<a name="line.787"></a>
+<span class="sourceLineNo">788</span><a name="line.788"></a>
+<span class="sourceLineNo">789</span>    for (Map.Entry&lt;byte[], Integer&gt; item : bdryMap.entrySet()) {<a name="line.789"></a>
+<span class="sourceLineNo">790</span>      if (runningValue == 0) {<a name="line.790"></a>
+<span class="sourceLineNo">791</span>        currStartKey = item.getKey();<a name="line.791"></a>
+<span class="sourceLineNo">792</span>      }<a name="line.792"></a>
+<span class="sourceLineNo">793</span>      runningValue += item.getValue();<a name="line.793"></a>
+<span class="sourceLineNo">794</span>      if (runningValue == 0) {<a name="line.794"></a>
+<span class="sourceLineNo">795</span>        if (!firstBoundary) {<a name="line.795"></a>
+<span class="sourceLineNo">796</span>          keysArray.add(currStartKey);<a name="line.796"></a>
+<span class="sourceLineNo">797</span>        }<a name="line.797"></a>
+<span class="sourceLineNo">798</span>        firstBoundary = false;<a name="line.798"></a>
+<span class="sourceLineNo">799</span>      }<a name="line.799"></a>
+<span class="sourceLineNo">800</span>    }<a name="line.800"></a>
+<span class="sourceLineNo">801</span><a name="line.801"></a>
+<span class="sourceLineNo">802</span>    return keysArray.toArray(new byte[0][]);<a name="line.802"></a>
+<span class="sourceLineNo">803</span>  }<a name="line.803"></a>
+<span class="sourceLineNo">804</span><a name="line.804"></a>
+<span class="sourceLineNo">805</span>  /**<a name="line.805"></a>
+<span class="sourceLineNo">806</span>   * If the table is created for the first time, then "completebulkload" reads the files twice. More<a name="line.806"></a>
+<span class="sourceLineNo">807</span>   * modifications necessary if we want to avoid doing it.<a name="line.807"></a>
+<span class="sourceLineNo">808</span>   */<a name="line.808"></a>
+<span class="sourceLineNo">809</span>  private void createTable(TableName tableName, Path hfofDir, AsyncAdmin admin) throws IOException {<a name="line.809"></a>
+<span class="sourceLineNo">810</span>    final FileSystem fs = hfofDir.getFileSystem(getConf());<a name="line.810"></a>
+<span class="sourceLineNo">811</span><a name="line.811"></a>
+<span class="sourceLineNo">812</span>    // Add column families<a name="line.812"></a>
+<span class="sourceLineNo">813</span>    // Build a set of keys<a name="line.813"></a>
+<span class="sourceLineNo">814</span>    List&lt;ColumnFamilyDescriptorBuilder&gt; familyBuilders = new ArrayList&lt;&gt;();<a name="line.814"></a>
+<span class="sourceLineNo">815</span>    SortedMap&lt;byte[], Integer&gt; map = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.815"></a>
+<span class="sourceLineNo">816</span>    visitBulkHFiles(fs, hfofDir, new BulkHFileVisitor&lt;ColumnFamilyDescriptorBuilder&gt;() {<a name="line.816"></a>
+<span class="sourceLineNo">817</span>      @Override<a name="line.817"></a>
+<span class="sourceLineNo">818</span>      public ColumnFamilyDescriptorBuilder bulkFamily(byte[] familyName) {<a name="line.818"></a>
+<span class="sourceLineNo">819</span>        ColumnFamilyDescriptorBuilder builder =<a name="line.819"></a>
+<span class="sourceLineNo">820</span>          ColumnFamilyDescriptorBuilder.newBuilder(familyName);<a name="line.820"></a>
+<span class="sourceLineNo">821</span>        familyBuilders.add(builder);<a name="line.821"></a>
+<span class="sourceLineNo">822</span>        return builder;<a name="line.822"></a>
+<span class="sourceLineNo">823</span>      }<a name="line.823"></a>
+<span class="sourceLineNo">824</span><a name="line.824"></a>
+<span class="sourceLineNo">825</span>      @Override<a name="line.825"></a>
+<span class="sourceLineNo">826</span>      public void bulkHFile(ColumnFamilyDescriptorBuilder builder, FileStatus hfileStatus)<a name="line.826"></a>
+<span class="sourceLineNo">827</span>          throws IOException {<a name="line.827"></a>
+<span class="sourceLineNo">828</span>        Path hfile = hfileStatus.getPath();<a name="line.828"></a>
+<span class="sourceLineNo">829</span>        try (HFile.Reader reader =<a name="line.829"></a>
+<span class="sourceLineNo">830</span>          HFile.createReader(fs, hfile, CacheConfig.DISABLED, true, getConf())) {<a name="line.830"></a>
+<span class="sourceLineNo">831</span>          if (builder.getCompressionType() != reader.getFileContext().getCompression()) {<a name="line.831"></a>
+<span class="sourceLineNo">832</span>            builder.setCompressionType(reader.getFileContext().getCompression());<a name="line.832"></a>
+<span class="sourceLineNo">833</span>            LOG.info("Setting compression " + reader.getFileContext().getCompression().name() +<a name="line.833"></a>
+<span class="sourceLineNo">834</span>              " for family " + builder.getNameAsString());<a name="line.834"></a>
+<span class="sourceLineNo">835</span>          }<a name="line.835"></a>
+<span class="sourceLineNo">836</span>          byte[] first = reader.getFirstRowKey().get();<a name="line.836"></a>
+<span class="sourceLineNo">837</span>          byte[] last = reader.getLastRowKey().get();<a name="line.837"></a>
+<span class="sourceLineNo">838</span><a name="line.838"></a>
+<span class="sourceLineNo">839</span>          LOG.info("Trying to figure out region boundaries hfile=" + hfile + " first=" +<a name="line.839"></a>
+<span class="sourceLineNo">840</span>            Bytes.toStringBinary(first) + " last=" + Bytes.toStringBinary(last));<a name="line.840"></a>
+<span class="sourceLineNo">841</span><a name="line.841"></a>
+<span class="sourceLineNo">842</span>          // To eventually infer start key-end key boundaries<a name="line.842"></a>
+<span class="sourceLineNo">843</span>          Integer value = map.getOrDefault(first, 0);<a name="line.843"></a>
+<span class="sourceLineNo">844</span>          map.put(first, value + 1);<a name="line.844"></a>
+<span class="sourceLineNo">845</span><a name="line.845"></a>
+<span class="sourceLineNo">846</span>          value = map.containsKey(last) ? map.get(last) : 0;<a name="line.846"></a>
+<span class="sourceLineNo">847</span>          map.put(last, value - 1);<a name="line.847"></a>
+<span class="sourceLineNo">848</span>        }<a name="line.848"></a>
+<span class="sourceLineNo">849</span>      }<a name="line.849"></a>
+<span class="sourceLineNo">850</span>    }, true);<a name="line.850"></a>
 <span class="sourceLineNo">851</span><a name="line.851"></a>
-<span class="sourceLineNo">852</span>          value = map.containsKey(last) ? map.get(last) : 0;<a name="line.852"></a>
-<span class="sourceLineNo">853</span>          map.put(last, value - 1);<a name="line.853"></a>
-<span class="sourceLineNo">854</span>        }<a name="line.854"></a>
-<span class="sourceLineNo">855</span>      }<a name="line.855"></a>
-<span class="sourceLineNo">856</span>    }, true);<a name="line.856"></a>
+<span class="sourceLineNo">852</span>    byte[][] keys = inferBoundaries(map);<a name="line.852"></a>
+<span class="sourceLineNo">853</span>    TableDescriptorBuilder tdBuilder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.853"></a>
+<span class="sourceLineNo">854</span>    familyBuilders.stream().map(ColumnFamilyDescriptorBuilder::build)<a name="line.854"></a>
+<span class="sourceLineNo">855</span>      .forEachOrdered(tdBuilder::setColumnFamily);<a name="line.855"></a>
+<span class="sourceLineNo">856</span>    FutureUtils.get(admin.createTable(tdBuilder.build(), keys));<a name="line.856"></a>
 <span class="sourceLineNo">857</span><a name="line.857"></a>
-<span class="sourceLineNo">858</span>    byte[][] keys = inferBoundaries(map);<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    TableDescriptorBuilder tdBuilder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.859"></a>
-<span class="sourceLineNo">860</span>    familyBuilders.stream().map(ColumnFamilyDescriptorBuilder::build)<a name="line.860"></a>
-<span class="sourceLineNo">861</span>      .forEachOrdered(tdBuilder::setColumnFamily);<a name="line.861"></a>
-<span class="sourceLineNo">862</span>    FutureUtils.get(admin.createTable(tdBuilder.build(), keys));<a name="line.862"></a>
-<span class="sourceLineNo">863</span><a name="line.863"></a>
-<span class="sourceLineNo">864</span>    LOG.info("Table " + tableName + " is available!!");<a name="line.864"></a>
-<span class="sourceLineNo">865</span>  }<a name="line.865"></a>
-<span class="sourceLineNo">866</span><a name="line.866"></a>
-<span class="sourceLineNo">867</span>  private Map&lt;LoadQueueItem, ByteBuffer&gt; performBulkLoad(AsyncClusterConnection conn,<a name="line.867"></a>
-<span class="sourceLineNo">868</span>      TableName tableName, Deque&lt;LoadQueueItem&gt; queue, ExecutorService pool, boolean copyFile)<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      throws IOException {<a name="line.869"></a>
-<span class="sourceLineNo">870</span>    int count = 0;<a name="line.870"></a>
-<span class="sourceLineNo">871</span><a name="line.871"></a>
-<span class="sourceLineNo">872</span>    fsDelegationToken.acquireDelegationToken(queue.peek().getFilePath().getFileSystem(getConf()));<a name="line.872"></a>
-<span class="sourceLineNo">873</span>    bulkToken = FutureUtils.get(conn.prepareBulkLoad(tableName));<a name="line.873"></a>
-<span class="sourceLineNo">874</span>    Pair&lt;Multimap&lt;ByteBuffer, LoadQueueItem&gt;, Set&lt;String&gt;&gt; pair = null;<a name="line.874"></a>
-<span class="sourceLineNo">875</span><a name="line.875"></a>
-<span class="sourceLineNo">876</span>    Map&lt;LoadQueueItem, ByteBuffer&gt; item2RegionMap = new HashMap&lt;&gt;();<a name="line.876"></a>
-<span class="sourceLineNo">877</span>    // Assumes that region splits can happen while this occurs.<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    while (!queue.isEmpty()) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>      // need to reload split keys each iteration.<a name="line.879"></a>
-<span class="sourceLineNo">880</span>      final List&lt;Pair&lt;byte[], byte[]&gt;&gt; startEndKeys =<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        FutureUtils.get(conn.getRegionLocator(tableName).getStartEndKeys());<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      if (count != 0) {<a name="line.882"></a>
-<span class="sourceLineNo">883</span>        LOG.info("Split occurred while grouping HFiles, retry attempt " + count + " with " +<a name="line.883"></a>
-<span class="sourceLineNo">884</span>          queue.size() + " files remaining to group or split");<a name="line.884"></a>
-<span class="sourceLineNo">885</span>      }<a name="line.885"></a>
-<span class="sourceLineNo">886</span><a name="line.886"></a>
-<span class="sourceLineNo">887</span>      int maxRetries = getConf().getInt(HConstants.BULKLOAD_MAX_RETRIES_NUMBER, 10);<a name="line.887"></a>
-<span class="sourceLineNo">888</span>      maxRetries = Math.max(maxRetries, startEndKeys.size() + 1);<a name="line.888"></a>
-<span class="sourceLineNo">889</span>      if (maxRetries != 0 &amp;&amp; count &gt;= maxRetries) {<a name="line.889"></a>
-<span class="sourceLineNo">890</span>        throw new IOException(<a name="line.890"></a>
-<span class="sourceLineNo">891</span>          "Retry attempted " + count + " times without completing, bailing out");<a name="line.891"></a>
-<span class="sourceLineNo">892</span>      }<a name="line.892"></a>
-<span class="sourceLineNo">893</span>      count++;<a name="line.893"></a>
-<span class="sourceLineNo">894</span><a name="line.894"></a>
-<span class="sourceLineNo">895</span>      // Using ByteBuffer for byte[] equality semantics<a name="line.895"></a>
-<span class="sourceLineNo">896</span>      pair = groupOrSplitPhase(conn, tableName, pool, queue, startEndKeys);<a name="line.896"></a>
-<span class="sourceLineNo">897</span>      Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups = pair.getFirst();<a name="line.897"></a>
+<span class="sourceLineNo">858</span>    LOG.info("Table " + tableName + " is available!!");<a name="line.858"></a>
+<span class="sourceLineNo">859</span>  }<a name="line.859"></a>
+<span class="sourceLineNo">860</span><a name="line.860"></a>
+<span class="sourceLineNo">861</span>  private Map&lt;LoadQueueItem, ByteBuffer&gt; performBulkLoad(AsyncClusterConnection conn,<a name="line.861"></a>
+<span class="sourceLineNo">862</span>      TableName tableName, Deque&lt;LoadQueueItem&gt; queue, ExecutorService pool, boolean copyFile)<a name="line.862"></a>
+<span class="sourceLineNo">863</span>      throws IOException {<a name="line.863"></a>
+<span class="sourceLineNo">864</span>    int count = 0;<a name="line.864"></a>
+<span class="sourceLineNo">865</span><a name="line.865"></a>
+<span class="sourceLineNo">866</span>    fsDelegationToken.acquireDelegationToken(queue.peek().getFilePath().getFileSystem(getConf()));<a name="line.866"></a>
+<span class="sourceLineNo">867</span>    bulkToken = FutureUtils.get(conn.prepareBulkLoad(tableName));<a name="line.867"></a>
+<span class="sourceLineNo">868</span>    Pair&lt;Multimap&lt;ByteBuffer, LoadQueueItem&gt;, Set&lt;String&gt;&gt; pair = null;<a name="line.868"></a>
+<span class="sourceLineNo">869</span><a name="line.869"></a>
+<span class="sourceLineNo">870</span>    Map&lt;LoadQueueItem, ByteBuffer&gt; item2RegionMap = new HashMap&lt;&gt;();<a name="line.870"></a>
+<span class="sourceLineNo">871</span>    // Assumes that region splits can happen while this occurs.<a name="line.871"></a>
+<span class="sourceLineNo">872</span>    while (!queue.isEmpty()) {<a name="line.872"></a>
+<span class="sourceLineNo">873</span>      // need to reload split keys each iteration.<a name="line.873"></a>
+<span class="sourceLineNo">874</span>      final List&lt;Pair&lt;byte[], byte[]&gt;&gt; startEndKeys =<a name="line.874"></a>
+<span class="sourceLineNo">875</span>        FutureUtils.get(conn.getRegionLocator(tableName).getStartEndKeys());<a name="line.875"></a>
+<span class="sourceLineNo">876</span>      if (count != 0) {<a name="line.876"></a>
+<span class="sourceLineNo">877</span>        LOG.info("Split occurred while grouping HFiles, retry attempt " + count + " with " +<a name="line.877"></a>
+<span class="sourceLineNo">878</span>          queue.size() + " files remaining to group or split");<a name="line.878"></a>
+<span class="sourceLineNo">879</span>      }<a name="line.879"></a>
+<span class="sourceLineNo">880</span><a name="line.880"></a>
+<span class="sourceLineNo">881</span>      int maxRetries = getConf().getInt(HConstants.BULKLOAD_MAX_RETRIES_NUMBER, 10);<a name="line.881"></a>
+<span class="sourceLineNo">882</span>      maxRetries = Math.max(maxRetries, startEndKeys.size() + 1);<a name="line.882"></a>
+<span class="sourceLineNo">883</span>      if (maxRetries != 0 &amp;&amp; count &gt;= maxRetries) {<a name="line.883"></a>
+<span class="sourceLineNo">884</span>        throw new IOException(<a name="line.884"></a>
+<span class="sourceLineNo">885</span>          "Retry attempted " + count + " times without completing, bailing out");<a name="line.885"></a>
+<span class="sourceLineNo">886</span>      }<a name="line.886"></a>
+<span class="sourceLineNo">887</span>      count++;<a name="line.887"></a>
+<span class="sourceLineNo">888</span><a name="line.888"></a>
+<span class="sourceLineNo">889</span>      // Using ByteBuffer for byte[] equality semantics<a name="line.889"></a>
+<span class="sourceLineNo">890</span>      pair = groupOrSplitPhase(conn, tableName, pool, queue, startEndKeys);<a name="line.890"></a>
+<span class="sourceLineNo">891</span>      Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups = pair.getFirst();<a name="line.891"></a>
+<span class="sourceLineNo">892</span><a name="line.892"></a>
+<span class="sourceLineNo">893</span>      if (!checkHFilesCountPerRegionPerFamily(regionGroups)) {<a name="line.893"></a>
+<span class="sourceLineNo">894</span>        // Error is logged inside checkHFilesCountPerRegionPerFamily.<a name="line.894"></a>
+<span class="sourceLineNo">895</span>        throw new IOException("Trying to load more than " + maxFilesPerRegionPerFamily +<a name="line.895"></a>
+<span class="sourceLineNo">896</span>          " hfiles to one family of one region");<a name="line.896"></a>
+<span class="sourceLineNo">897</span>      }<a name="line.897"></a>
 <span class="sourceLineNo">898</span><a name="line.898"></a>
-<span class="sourceLineNo">899</span>      if (!checkHFilesCountPerRegionPerFamily(regionGroups)) {<a name="line.899"></a>
-<span class="sourceLineNo">900</span>        // Error is logged inside checkHFilesCountPerRegionPerFamily.<a name="line.900"></a>
-<span class="sourceLineNo">901</span>        throw new IOException("Trying to load more than " + maxFilesPerRegionPerFamily +<a name="line.901"></a>
-<span class="sourceLineNo">902</span>          " hfiles to one family of one region");<a name="line.902"></a>
-<span class="sourceLineNo">903</span>      }<a name="line.903"></a>
-<span class="sourceLineNo">904</span><a name="line.904"></a>
-<span class="sourceLineNo">905</span>      bulkLoadPhase(conn, tableName, queue, regionGroups, copyFile, item2RegionMap);<a name="line.905"></a>
-<span class="sourceLineNo">906</span><a name="line.906"></a>
-<span class="sourceLineNo">907</span>      // NOTE: The next iteration's split / group could happen in parallel to<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      // atomic bulkloads assuming that there are splits and no merges, and<a name="line.908"></a>
-<span class="sourceLineNo">909</span>      // that we can atomically pull out the groups we want to retry.<a name="line.909"></a>
-<span class="sourceLineNo">910</span>    }<a name="line.910"></a>
-<span class="sourceLineNo">911</span><a name="line.911"></a>
-<span class="sourceLineNo">912</span>    return item2RegionMap;<a name="line.912"></a>
-<span class="sourceLineNo">913</span>  }<a name="line.913"></a>
-<span class="sourceLineNo">914</span><a name="line.914"></a>
-<span class="sourceLineNo">915</span>  private void cleanup(AsyncClusterConnection conn, TableName tableName, Deque&lt;LoadQueueItem&gt; queue,<a name="line.915"></a>
-<span class="sourceLineNo">916</span>      ExecutorService pool) throws IOException {<a name="line.916"></a>
-<span class="sourceLineNo">917</span>    fsDelegationToken.releaseDelegationToken();<a name="line.917"></a>
-<span class="sourceLineNo">918</span>    if (bulkToken != null) {<a name="line.918"></a>
-<span class="sourceLineNo">919</span>      conn.cleanupBulkLoad(tableName, bulkToken);<a name="line.919"></a>
-<span class="sourceLineNo">920</span>    }<a name="line.920"></a>
-<span class="sourceLineNo">921</span>    if (pool != null) {<a name="line.921"></a>
-<span class="sourceLineNo">922</span>      pool.shutdown();<a name="line.922"></a>
-<span class="sourceLineNo">923</span>    }<a name="line.923"></a>
-<span class="sourceLineNo">924</span>    if (!queue.isEmpty()) {<a name="line.924"></a>
-<span class="sourceLineNo">925</span>      StringBuilder err = new StringBuilder();<a name="line.925"></a>
-<span class="sourceLineNo">926</span>      err.append("-------------------------------------------------\n");<a name="line.926"></a>
-<span class="sourceLineNo">927</span>      err.append("Bulk load aborted with some files not yet loaded:\n");<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      err.append("-------------------------------------------------\n");<a name="line.928"></a>
-<span class="sourceLineNo">929</span>      for (LoadQueueItem q : queue) {<a name="line.929"></a>
-<span class="sourceLineNo">930</span>        err.append("  ").append(q.getFilePath()).append('\n');<a name="line.930"></a>
-<span class="sourceLineNo">931</span>      }<a name="line.931"></a>
-<span class="sourceLineNo">932</span>      LOG.error(err.toString());<a name="line.932"></a>
-<span class="sourceLineNo">933</span>    }<a name="line.933"></a>
-<span class="sourceLineNo">934</span>  }<a name="line.934"></a>
-<span class="sourceLineNo">935</span><a name="line.935"></a>
-<span class="sourceLineNo">936</span>  /**<a name="line.936"></a>
-<span class="sourceLineNo">937</span>   * Perform a bulk load of the given map of families to hfiles into the given pre-existing table.<a name="line.937"></a>
-<span class="sourceLineNo">938</span>   * This method is not threadsafe.<a name="line.938"></a>
-<span class="sourceLineNo">939</span>   * @param map map of family to List of hfiles<a name="line.939"></a>
-<span class="sourceLineNo">940</span>   * @param tableName table to load the hfiles<a name="line.940"></a>
-<span class="sourceLineNo">941</span>   * @param silence true to ignore unmatched column families<a name="line.941"></a>
-<span class="sourceLineNo">942</span>   * @param copyFile always copy hfiles if true<a name="line.942"></a>
-<span class="sourceLineNo">943</span>   */<a name="line.943"></a>
-<span class="sourceLineNo">944</span>  private Map&lt;LoadQueueItem, ByteBuffer&gt; doBulkLoad(AsyncClusterConnection conn,<a name="line.944"></a>
-<span class="sourceLineNo">945</span>      TableName tableName, Map&lt;byte[], List&lt;Path&gt;&gt; map, boolean silence, boolean copyFile)<a name="line.945"></a>
-<span class="sourceLineNo">946</span>      throws IOException {<a name="line.946"></a>
-<span class="sourceLineNo">947</span>    tableExists(conn, tableName);<a name="line.947"></a>
-<span class="sourceLineNo">948</span>    // LQI queue does not need to be threadsafe -- all operations on this queue<a name="line.948"></a>
-<span class="sourceLineNo">949</span>    // happen in this thread<a name="line.949"></a>
-<span class="sourceLineNo">950</span>    Deque&lt;LoadQueueItem&gt; queue = new ArrayDeque&lt;&gt;();<a name="line.950"></a>
-<span class="sourceLineNo">951</span>    ExecutorService pool = null;<a name="line.951"></a>
-<span class="sourceLineNo">952</span>    try {<a name="line.952"></a>
-<span class="sourceLineNo">953</span>      prepareHFileQueue(conn, tableName, map, queue, silence);<a name="line.953"></a>
-<span class="sourceLineNo">954</span>      if (queue.isEmpty()) {<a name="line.954"></a>
-<span class="sourceLineNo">955</span>        LOG.warn("Bulk load operation did not get any files to load");<a name="line.955"></a>
-<span class="sourceLineNo">956</span>        return Collections.emptyMap();<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      }<a name="line.957"></a>
-<span class="sourceLineNo">958</span>      pool = createExecutorService();<a name="line.958"></a>
-<span class="sourceLineNo">959</span>      return performBulkLoad(conn, tableName, queue, pool, copyFile);<a name="line.959"></a>
-<span class="sourceLineNo">960</span>    } finally {<a name="line.960"></a>
-<span class="sourceLineNo">961</span>      cleanup(conn, tableName, queue, pool);<a name="line.961"></a>
-<span class="sourceLineNo">962</span>    }<a name="line.962"></a>
-<span class="sourceLineNo">963</span>  }<a name="line.963"></a>
-<span class="sourceLineNo">964</span><a name="line.964"></a>
-<span class="sourceLineNo">965</span>  /**<a name="line.965"></a>
-<span class="sourceLineNo">966</span>   * Perform a bulk load of the given directory into the given pre-existing table. This method is<a name="line.966"></a>
-<span class="sourceLineNo">967</span>   * not threadsafe.<a name="line.967"></a>
-<span class="sourceLineNo">968</span>   * @param tableName table to load the hfiles<a name="line.968"></a>
-<span class="sourceLineNo">969</span>   * @param hfofDir the directory that was provided as the output path of a job using<a name="line.969"></a>
-<span class="sourceLineNo">970</span>   *          HFileOutputFormat<a name="line.970"></a>
-<span class="sourceLineNo">971</span>   * @param silence true to ignore unmatched column families<a name="line.971"></a>
-<span class="sourceLineNo">972</span>   * @param copyFile always copy hfiles if true<a name="line.972"></a>
-<span class="sourceLineNo">973</span>   */<a name="line.973"></a>
-<span class="sourceLineNo">974</span>  private Map&lt;LoadQueueItem, ByteBuffer&gt; doBulkLoad(AsyncClusterConnection conn,<a name="line.974"></a>
-<span class="sourceLineNo">975</span>      TableName tableName, Path hfofDir, boolean silence, boolean copyFile)<a name="line.975"></a>
-<span class="sourceLineNo">976</span>      throws IOException {<a name="line.976"></a>
-<span class="sourceLineNo">977</span>    tableExists(conn, tableName);<a name="line.977"></a>
-<span class="sourceLineNo">978</span><a name="line.978"></a>
-<span class="sourceLineNo">979</span>    /*<a name="line.979"></a>
-<span class="sourceLineNo">980</span>     * Checking hfile format is a time-consuming operation, we should have an option to skip this<a name="line.980"></a>
-<span class="sourceLineNo">981</span>     * step when bulkloading millions of HFiles. See HBASE-13985.<a name="line.981"></a>
-<span class="sourceLineNo">982</span>     */<a name="line.982"></a>
-<span class="sourceLineNo">983</span>    boolean validateHFile = getConf().getBoolean(VALIDATE_HFILES, true);<a name="line.983"></a>
-<span class="sourceLineNo">984</span>    if (!validateHFile) {<a name="line.984"></a>
-<span class="sourceLineNo">985</span>      LOG.warn("You are skipping HFiles validation, it might cause some data loss if files " +<a name="line.985"></a>
-<span class="sourceLineNo">986</span>        "are not correct. If you fail to read data from your table after using this " +<a name="line.986"></a>
-<span class="sourceLineNo">987</span>        "option, consider removing the files and bulkload again without this option. " +<a name="line.987"></a>
-<span class="sourceLineNo">988</span>        "See HBASE-13985");<a name="line.988"></a>
-<span class="sourceLineNo">989</span>    }<a name="line.989"></a>
-<span class="sourceLineNo">990</span>    // LQI queue does not need to be threadsafe -- all operations on this queue<a name="line.990"></a>
-<span class="sourceLineNo">991</span>    // happen in this thread<a name="line.991"></a>
-<span class="sourceLineNo">992</span>    Deque&lt;LoadQueueItem&gt; queue = new ArrayDeque&lt;&gt;();<a name="line.992"></a>
-<span class="sourceLineNo">993</span>    ExecutorService pool = null;<a name="line.993"></a>
-<span class="sourceLineNo">994</span>    try {<a name="line.994"></a>
-<span class="sourceLineNo">995</span>      prepareHFileQueue(getConf(), conn, tableName, hfofDir, queue, validateHFile, silence);<a name="line.995"></a>
-<span class="sourceLineNo">996</span><a name="line.996"></a>
-<span class="sourceLineNo">997</span>      if (queue.isEmpty()) {<a name="line.997"></a>
-<span class="sourceLineNo">998</span>        LOG.warn(<a name="line.998"></a>
-<span class="sourceLineNo">999</span>          "Bulk load operation did not find any files to load in directory {}. " +<a name="line.999"></a>
-<span class="sourceLineNo">1000</span>          "Does it contain files in subdirectories that correspond to column family names?",<a name="line.1000"></a>
-<span class="sourceLineNo">1001</span>          (hfofDir != null ? hfofDir.toUri().toString() : ""));<a name="line.1001"></a>
-<span class="sourceLineNo">1002</span>        return Collections.emptyMap();<a name="line.1002"></a>
-<span class="sourceLineNo">1003</span>      }<a name="line.1003"></a>
-<span class="sourceLineNo">1004</span>      pool = createExecutorService();<a name="line.1004"></a>
-<span class="sourceLineNo">1005</span>      return performBulkLoad(conn, tableName, queue, pool, copyFile);<a name="line.1005"></a>
-<span class="sourceLineNo">1006</span>    } finally {<a name="line.1006"></a>
-<span class="sourceLineNo">1007</span>      cleanup(conn, tableName, queue, pool);<a name="line.1007"></a>
-<span class="sourceLineNo">1008</span>    }<a name="line.1008"></a>
-<span class="sourceLineNo">1009</span>  }<a name="line.1009"></a>
-<span class="sourceLineNo">1010</span><a name="line.1010"></a>
-<span class="sourceLineNo">1011</span>  @Override<a name="line.1011"></a>
-<span class="sourceLineNo">1012</span>  public Map&lt;LoadQueueItem, ByteBuffer&gt; bulkLoad(TableName tableName,<a name="line.1012"></a>
-<span class="sourceLineNo">1013</span>      Map&lt;byte[], List&lt;Path&gt;&gt; family2Files) throws IOException {<a name="line.1013"></a>
-<span class="sourceLineNo">1014</span>    try (AsyncClusterConnection conn = ClusterConnectionFactory.<a name="line.1014"></a>
-<span class="sourceLineNo">1015</span>        createAsyncClusterConnection(getConf(), null, userProvider.getCurrent())) {<a name="line.1015"></a>
-<span class="sourceLineNo">1016</span>      return doBulkLoad(conn, tableName, family2Files, isSilence(), isAlwaysCopyFiles());<a name="line.1016"></a>
-<span class="sourceLineNo">1017</span>    }<a name="line.1017"></a>
-<span class="sourceLineNo">1018</span>  }<a name="line.1018"></a>
-<span class="sourceLineNo">1019</span><a name="line.1019"></a>
-<span class="sourceLineNo">1020</span>  @Override<a name="line.1020"></a>
-<span class="sourceLineNo">1021</span>  public Map&lt;LoadQueueItem, ByteBuffer&gt; bulkLoad(TableName tableName, Path dir)<a name="line.1021"></a>
-<span class="sourceLineNo">1022</span>      throws IOException {<a name="line.1022"></a>
-<span class="sourceLineNo">1023</span>    try (AsyncClusterConnection conn = ClusterConnectionFactory<a name="line.1023"></a>
-<span class="sourceLineNo">1024</span>      .createAsyncClusterConnection(getConf(), null, userProvider.getCurrent())) {<a name="line.1024"></a>
-<span class="sourceLineNo">1025</span>      AsyncAdmin admin = conn.getAdmin();<a name="line.1025"></a>
-<span class="sourceLineNo">1026</span>      if (!FutureUtils.get(admin.tableExists(tableName))) {<a name="line.1026"></a>
-<span class="sourceLineNo">1027</span>        if (isCreateTable()) {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          createTable(tableName, dir, admin);<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>        } else {<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          throwAndLogTableNotFoundException(tableName);<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>        }<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>      }<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>      return doBulkLoad(conn, tableName, dir, isSilence(), isAlwaysCopyFiles());<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>    }<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>  }<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span><a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>  /**<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>   * @throws TableNotFoundException if table does not exist.<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>   */<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>  private void tableExists(AsyncClusterConnection conn, TableName tableName) throws IOException {<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>    if (!FutureUtils.get(conn.getAdmin().tableExists(tableName))) {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>      throwAndLogTableNotFoundException(tableName);<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>    }<a name="line.1043"></a>
+<span class="sourceLineNo">899</span>      bulkLoadPhase(conn, tableName, queue, regionGroups, copyFile, item2RegionMap);<a name="line.899"></a>
+<span class="sourceLineNo">900</span><a name="line.900"></a>
+<span class="sourceLineNo">901</span>      // NOTE: The next iteration's split / group could happen in parallel to<a name="line.901"></a>
+<span class="sourceLineNo">902</span>      // atomic bulkloads assuming that there are splits and no merges, and<a name="line.902"></a>
+<span class="sourceLineNo">903</span>      // that we can atomically pull out the groups we want to retry.<a name="line.903"></a>
+<span class="sourceLineNo">904</span>    }<a name="line.904"></a>
+<span class="sourceLineNo">905</span><a name="line.905"></a>
+<span class="sourceLineNo">906</span>    return item2RegionMap;<a name="line.906"></a>
+<span class="sourceLineNo">907</span>  }<a name="line.907"></a>
+<span class="sourceLineNo">908</span><a name="line.908"></a>
+<span class="sourceLineNo">909</span>  private void cleanup(AsyncClusterConnection conn, TableName tableName, Deque&lt;LoadQueueItem&gt; queue,<a name="line.909"></a>
+<span class="sourceLineNo">910</span>      ExecutorService pool) throws IOException {<a name="line.910"></a>
+<span class="sourceLineNo">911</span>    fsDelegationToken.releaseDelegationToken();<a name="line.911"></a>
+<span class="sourceLineNo">912</span>    if (bulkToken != null) {<a name="line.912"></a>
+<span class="sourceLineNo">913</span>      conn.cleanupBulkLoad(tableName, bulkToken);<a name="line.913"></a>
+<span class="sourceLineNo">914</span>    }<a name="line.914"></a>
+<span class="sourceLineNo">915</span>    if (pool != null) {<a name="line.915"></a>
+<span class="sourceLineNo">916</span>      pool.shutdown();<a name="line.916"></a>
+<span class="sourceLineNo">917</span>    }<a name="line.917"></a>
+<span class="sourceLineNo">918</span>    if (!queue.isEmpty()) {<a name="line.918"></a>
+<span class="sourceLineNo">919</span>      StringBuilder err = new StringBuilder();<a name="line.919"></a>
+<span class="sourceLineNo">920</span>      err.append("-------------------------------------------------\n");<a name="line.920"></a>
+<span class="sourceLineNo">921</span>      err.append("Bulk load aborted with some files not yet loaded:\n");<a name="line.921"></a>
+<span class="sourceLineNo">922</span>      err.append("-------------------------------------------------\n");<a name="line.922"></a>
+<span class="sourceLineNo">923</span>      for (LoadQueueItem q : queue) {<a name="line.923"></a>
+<span class="sourceLineNo">924</span>        err.append("  ").append(q.getFilePath()).append('\n');<a name="line.924"></a>
+<span class="sourceLineNo">925</span>      }<a name="line.925"></a>
+<span class="sourceLineNo">926</span>      LOG.error(err.toString());<a name="line.926"></a>
+<span class="sourceLineNo">927</span>    }<a name="line.927"></a>
+<span class="sourceLineNo">928</span>  }<a name="line.928"></a>
+<span class="sourceLineNo">929</span><a name="line.929"></a>
+<span class="sourceLineNo">930</span>  /**<a name="line.930"></a>
+<span class="sourceLineNo">931</span>   * Perform a bulk load of the given map of families to hfiles into the given pre-existing table.<a name="line.931"></a>
+<span class="sourceLineNo">932</span>   * This method is not threadsafe.<a name="line.932"></a>
+<span class="sourceLineNo">933</span>   * @param map map of family to List of hfiles<a name="line.933"></a>
+<span class="sourceLineNo">934</span>   * @param tableName table to load the hfiles<a name="line.934"></a>
+<span class="sourceLineNo">935</span>   * @param silence true to ignore unmatched column families<a name="line.935"></a>
+<span class="sourceLineNo">936</span>   * @param copyFile always copy hfiles if true<a name="line.936"></a>
+<span class="sourceLineNo">937</span>   */<a name="line.937"></a>
+<span class="sourceLineNo">938</span>  private Map&lt;LoadQueueItem, ByteBuffer&gt; doBulkLoad(AsyncClusterConnection conn,<a name="line.938"></a>
+<span class="sourceLineNo">939</span>      TableName tableName, Map&lt;byte[], List&lt;Path&gt;&gt; map, boolean silence, boolean copyFile)<a name="line.939"></a>
+<span class="sourceLineNo">940</span>      throws IOException {<a name="line.940"></a>
+<span class="sourceLineNo">941</span>    tableExists(conn, tableName);<a name="line.941"></a>
+<span class="sourceLineNo">942</span>    // LQI queue does not need to be threadsafe -- all operations on this queue<a name="line.942"></a>
+<span class="sourceLineNo">943</span>    // happen in this thread<a name="line.943"></a>
+<span class="sourceLineNo">944</span>    Deque&lt;LoadQueueItem&gt; queue = new ArrayDeque&lt;&gt;();<a name="line.944"></a>
+<span class="sourceLineNo">945</span>    ExecutorService pool = null;<a name="line.945"></a>
+<span class="sourceLineNo">946</span>    try {<a name="line.946"></a>
+<span class="sourceLineNo">947</span>      prepareHFileQueue(conn, tableName, map, queue, silence);<a name="line.947"></a>
+<span class="sourceLineNo">948</span>      if (queue.isEmpty()) {<a name="line.948"></a>
+<span class="sourceLineNo">949</span>        LOG.warn("Bulk load operation did not get any files to load");<a name="line.949"></a>
+<span class="sourceLineNo">950</span>        return Collections.emptyMap();<a name="line.950"></a>
+<span class="sourceLineNo">951</span>      }<a name="line.951"></a>
+<span class="sourceLineNo">952</span>      pool = createExecutorService();<a name="line.952"></a>
+<span class="sourceLineNo">953</span>      return performBulkLoad(conn, tableName, queue, pool, copyFile);<a name="line.953"></a>
+<span class="sourceLineNo">954</span>    } finally {<a name="line.954"></a>
+<span class="sourceLineNo">955</span>      cleanup(conn, tableName, queue, pool);<a name="line.955"></a>
+<span class="sourceLineNo">956</span>    }<a name="line.956"></a>
+<span class="sourceLineNo">957</span>  }<a name="line.957"></a>
+<span class="sourceLineNo">958</span><a name="line.958"></a>
+<span class="sourceLineNo">959</span>  /**<a name="line.959"></a>
+<span class="sourceLineNo">960</span>   * Perform a bulk load of the given directory into the given pre-existing table. This method is<a name="line.960"></a>
+<span class="sourceLineNo">961</span>   * not threadsafe.<a name="line.961"></a>
+<span class="sourceLineNo">962</span>   * @param tableName table to load the hfiles<a name="line.962"></a>
+<span class="sourceLineNo">963</span>   * @param hfofDir the directory that was provided as the output path of a job using<a name="line.963"></a>
+<span class="sourceLineNo">964</span>   *          HFileOutputFormat<a name="line.964"></a>
+<span class="sourceLineNo">965</span>   * @param silence true to ignore unmatched column families<a name="line.965"></a>
+<span class="sourceLineNo">966</span>   * @param copyFile always copy hfiles if true<a name="line.966"></a>
+<span class="sourceLineNo">967</span>   */<a name="line.967"></a>
+<span class="sourceLineNo">968</span>  private Map&lt;LoadQueueItem, ByteBuffer&gt; doBulkLoad(AsyncClusterConnection conn,<a name="line.968"></a>
+<span class="sourceLineNo">969</span>      TableName tableName, Path hfofDir, boolean silence, boolean copyFile)<a name="line.969"></a>
+<span class="sourceLineNo">970</span>      throws IOException {<a name="line.970"></a>
+<span class="sourceLineNo">971</span>    tableExists(conn, tableName);<a name="line.971"></a>
+<span class="sourceLineNo">972</span><a name="line.972"></a>
+<span class="sourceLineNo">973</span>    /*<a name="line.973"></a>
+<span class="sourceLineNo">974</span>     * Checking hfile format is a time-consuming operation, we should have an option to skip this<a name="line.974"></a>
+<span class="sourceLineNo">975</span>     * step when bulkloading millions of HFiles. See HBASE-13985.<a name="line.975"></a>
+<span class="sourceLineNo">976</span>     */<a name="line.976"></a>
+<span class="sourceLineNo">977</span>    boolean validateHFile = getConf().getBoolean(VALIDATE_HFILES, true);<a name="line.977"></a>
+<span class="sourceLineNo">978</span>    if (!validateHFile) {<a name="line.978"></a>
+<span class="sourceLineNo">979</span>      LOG.warn("You are skipping HFiles validation, it might cause some data loss if files " +<a name="line.979"></a>
+<span class="sourceLineNo">980</span>        "are not correct. If you fail to read data from your table after using this " +<a name="line.980"></a>
+<span class="sourceLineNo">981</span>        "option, consider removing the files and bulkload again without this option. " +<a name="line.981"></a>
+<span class="sourceLineNo">982</span>        "See HBASE-13985");<a name="line.982"></a>
+<span class="sourceLineNo">983</span>    }<a name="line.983"></a>
+<span class="sourceLineNo">984</span>    // LQI queue does not need to be threadsafe -- all operations on this queue<a name="line.984"></a>
+<span class="sourceLineNo">985</span>    // happen in this thread<a name="line.985"></a>
+<span class="sourceLineNo">986</span>    Deque&lt;LoadQueueItem&gt; queue = new ArrayDeque&lt;&gt;();<a name="line.986"></a>
+<span class="sourceLineNo">987</span>    ExecutorService pool = null;<a name="line.987"></a>
+<span class="sourceLineNo">988</span>    try {<a name="line.988"></a>
+<span class="sourceLineNo">989</span>      prepareHFileQueue(getConf(), conn, tableName, hfofDir, queue, validateHFile, silence);<a name="line.989"></a>
+<span class="sourceLineNo">990</span><a name="line.990"></a>
+<span class="sourceLineNo">991</span>      if (queue.isEmpty()) {<a name="line.991"></a>
+<span class="sourceLineNo">992</span>        LOG.warn(<a name="line.992"></a>
+<span class="sourceLineNo">993</span>          "Bulk load operation did not find any files to load in directory {}. " +<a name="line.993"></a>
+<span class="sourceLineNo">994</span>          "Does it contain files in subdirectories that correspond to column family names?",<a name="line.994"></a>
+<span class="sourceLineNo">995</span>          (hfofDir != null ? hfofDir.toUri().toString() : ""));<a name="line.995"></a>
+<span class="sourceLineNo">996</span>        return Collections.emptyMap();<a name="line.996"></a>
+<span class="sourceLineNo">997</span>      }<a name="line.997"></a>
+<span class="sourceLineNo">998</span>      pool = createExecutorService();<a name="line.998"></a>
+<span class="sourceLineNo">999</span>      return performBulkLoad(conn, tableName, queue, pool, copyFile);<a name="line.999"></a>
+<span class="sourceLineNo">1000</span>    } finally {<a name="line.1000"></a>
+<span class="sourceLineNo">1001</span>      cleanup(conn, tableName, queue, pool);<a name="line.1001"></a>
+<span class="sourceLineNo">1002</span>    }<a name="line.1002"></a>
+<span class="sourceLineNo">1003</span>  }<a name="line.1003"></a>
+<span class="sourceLineNo">1004</span><a name="line.1004"></a>
+<span class="sourceLineNo">1005</span>  @Override<a name="line.1005"></a>
+<span class="sourceLineNo">1006</span>  public Map&lt;LoadQueueItem, ByteBuffer&gt; bulkLoad(TableName tableName,<a name="line.1006"></a>
+<span class="sourceLineNo">1007</span>      Map&lt;byte[], List&lt;Path&gt;&gt; family2Files) throws IOException {<a name="line.1007"></a>
+<span class="sourceLineNo">1008</span>    try (AsyncClusterConnection conn = ClusterConnectionFactory.<a name="line.1008"></a>
+<span class="sourceLineNo">1009</span>        createAsyncClusterConnection(getConf(), null, userProvider.getCurrent())) {<a name="line.1009"></a>
+<span class="sourceLineNo">1010</span>      return doBulkLoad(conn, tableName, family2Files, isSilence(), isAlwaysCopyFiles());<a name="line.1010"></a>
+<span class="sourceLineNo">1011</span>    }<a name="line.1011"></a>
+<span class="sourceLineNo">1012</span>  }<a name="line.1012"></a>
+<span class="sourceLineNo">1013</span><a name="line.1013"></a>
+<span class="sourceLineNo">1014</span>  @Override<a name="line.1014"></a>
+<span class="sourceLineNo">1015</span>  public Map&lt;LoadQueueItem, ByteBuffer&gt; bulkLoad(TableName tableName, Path dir)<a name="line.1015"></a>
+<span class="sourceLineNo">1016</span>      throws IOException {<a name="line.1016"></a>
+<span class="sourceLineNo">1017</span>    try (AsyncClusterConnection conn = ClusterConnectionFactory<a name="line.1017"></a>
+<span class="sourceLineNo">1018</span>      .createAsyncClusterConnection(getConf(), null, userProvider.getCurrent())) {<a name="line.1018"></a>
+<span class="sourceLineNo">1019</span>      AsyncAdmin admin = conn.getAdmin();<a name="line.1019"></a>
+<span class="sourceLineNo">1020</span>      if (!FutureUtils.get(admin.tableExists(tableName))) {<a name="line.1020"></a>
+<span class="sourceLineNo">1021</span>        if (isCreateTable()) {<a name="line.1021"></a>
+<span class="sourceLineNo">1022</span>          createTable(tableName, dir, admin);<a name="line.1022"></a>
+<span class="sourceLineNo">1023</span>        } else {<a name="line.1023"></a>
+<span class="sourceLineNo">1024</span>          throwAndLogTableNotFoundException(tableName);<a name="line.1024"></a>
+<span class="sourceLineNo">1025</span>        }<a name="line.1025"></a>
+<span class="sourceLineNo">1026</span>      }<a name="line.1026"></a>
+<span class="sourceLineNo">1027</span>      return doBulkLoad(conn, tableName, dir, isSilence(), isAlwaysCopyFiles());<a name="line.1027"></a>
+<span class="sourceLineNo">1028</span>    }<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>  }<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span><a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>  /**<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>   * @throws TableNotFoundException if table does not exist.<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>   */<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>  private void tableExists(AsyncClusterConnection conn, TableName tableName) throws IOException {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>    if (!FutureUtils.get(conn.getAdmin().tableExists(tableName))) {<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>      throwAndLogTableNotFoundException(tableName);<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>    }<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>  }<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span><a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>  private void throwAndLogTableNotFoundException(TableName tn) throws TableNotFoundException {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>    String errorMsg = format("Table '%s' does not exist.", tn);<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>    LOG.error(errorMsg);<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>    throw new TableNotFoundException(errorMsg);<a name="line.1043"></a>
 <span class="sourceLineNo">1044</span>  }<a name="line.1044"></a>
 <span class="sourceLineNo">1045</span><a name="line.1045"></a>
-<span class="sourceLineNo">1046</span>  private void throwAndLogTableNotFoundException(TableName tn) throws TableNotFoundException {<a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>    String errorMsg = format("Table '%s' does not exist.", tn);<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>    LOG.error(errorMsg);<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>    throw new TableNotFoundException(errorMsg);<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>  }<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span><a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>  public void setBulkToken(String bulkToken) {<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span>    this.bulkToken = bulkToken;<a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>  }<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span><a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>  public void setClusterIds(List&lt;String&gt; clusterIds) {<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>    this.clusterIds = clusterIds;<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>  }<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span><a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>  private void usage() {<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>    System.err.println("Usage: " + "bin/hbase completebulkload [OPTIONS] "<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>        + "&lt;/PATH/TO/HFILEOUTPUTFORMAT-OUTPUT&gt; &lt;TABLENAME&gt;\n"<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>        + "Loads directory of hfiles -- a region dir or product of HFileOutputFormat -- "<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>        + "into an hbase table.\n"<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>        + "OPTIONS (for other -D options, see source code):\n"<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>        + " -D" + CREATE_TABLE_CONF_KEY + "=no whether to create table; when 'no', target "<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>        + "table must exist.\n"<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>        + " -D" + IGNORE_UNMATCHED_CF_CONF_KEY + "=yes to ignore unmatched column families.\n"<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>        + " -loadTable for when directory of files to load has a depth of 3; target table must "<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>        + "exist;\n"<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        + " must be last of the options on command line.\n"<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span>        + "See http://hbase.apache.org/book.html#arch.bulk.load.complete.strays for "<a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>        + "documentation.\n");<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>  }<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span><a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>  @Override<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>  public int run(String[] args) throws Exception {<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    if (args.length != 2 &amp;&amp; args.length != 3) {<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>      usage();<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>      return -1;<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span>    }<a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    Path dirPath = new Path(args[0]);<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    TableName tableName = TableName.valueOf(args[1]);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    if (args.length == 2) {<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>      return !bulkLoad(tableName, dirPath).isEmpty() ? 0 : -1;<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span>    } else {<a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>      Map&lt;byte[], List&lt;Path&gt;&gt; family2Files = Maps.newHashMap();<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>      FileSystem fs = FileSystem.get(getConf());<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>      for (FileStatus regionDir : fs.listStatus(dirPath)) {<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>        FSVisitor.visitRegionStoreFiles(fs, regionDir.getPath(), (region, family, hfileName) -&gt; {<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>          Path path = new Path(regionDir.getPath(), new Path(family, hfileName));<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>          byte[] familyName = Bytes.toBytes(family);<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>          if (family2Files.containsKey(familyName)) {<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>            family2Files.get(familyName).add(path);<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>          } else {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>            family2Files.put(familyName, Lists.newArrayList(path));<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>          }<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>        });<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span>      }<a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>      return !bulkLoad(tableName, family2Files).isEmpty() ? 0 : -1;<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    }<a name="line.1101"></a>
+<span class="sourceLineNo">1046</span>  public void setBulkToken(String bulkToken) {<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>    this.bulkToken = bulkToken;<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>  }<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span><a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>  public void setClusterIds(List&lt;String&gt; clusterIds) {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>    this.clusterIds = clusterIds;<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span>  }<a name="line.1052"></a>
+<span class="sourceLineNo">1053</span><a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>  private void usage() {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>    System.err.println("Usage: " + "bin/hbase completebulkload [OPTIONS] "<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>        + "&lt;/PATH/TO/HFILEOUTPUTFORMAT-OUTPUT&gt; &lt;TABLENAME&gt;\n"<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>        + "Loads directory of hfiles -- a region dir or product of HFileOutputFormat -- "<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>        + "into an hbase table.\n"<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>        + "OPTIONS (for other -D options, see source code):\n"<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        + " -D" + CREATE_TABLE_CONF_KEY + "=no whether to create table; when 'no', target "<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>        + "table must exist.\n"<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>        + " -D" + IGNORE_UNMATCHED_CF_CONF_KEY + "=yes to ignore unmatched column families.\n"<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>        + " -loadTable for when directory of files to load has a depth of 3; target table must "<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>        + "exist;\n"<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>        + " must be last of the options on command line.\n"<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>        + "See http://hbase.apache.org/book.html#arch.bulk.load.complete.strays for "<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>        + "documentation.\n");<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>  }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span><a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>  @Override<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span>  public int run(String[] args) throws Exception {<a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    if (args.length != 2 &amp;&amp; args.length != 3) {<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>      usage();<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>      return -1;<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>    }<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>    Path dirPath = new Path(args[0]);<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    TableName tableName = TableName.valueOf(args[1]);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    if (args.length == 2) {<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>      return !bulkLoad(tableName, dirPath).isEmpty() ? 0 : -1;<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span>    } else {<a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>      Map&lt;byte[], List&lt;Path&gt;&gt; family2Files = Maps.newHashMap();<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>      FileSystem fs = FileSystem.get(getConf());<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>      for (FileStatus regionDir : fs.listStatus(dirPath)) {<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>        FSVisitor.visitRegionStoreFiles(fs, regionDir.getPath(), (region, family, hfileName) -&gt; {<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span>          Path path = new Path(regionDir.getPath(), new Path(family, hfileName));<a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>          byte[] familyName = Bytes.toBytes(family);<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>          if (family2Files.containsKey(familyName)) {<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>            family2Files.get(familyName).add(path);<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>          } else {<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>            family2Files.put(familyName, Lists.newArrayList(path));<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>          }<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>        });<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>      }<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>      return !bulkLoad(tableName, family2Files).isEmpty() ? 0 : -1;<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    }<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>  }<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span><a name="line.1097"></a>
+<span class="sourceLineNo">1098</span>  public static void main(String[] args) throws Exception {<a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    Configuration conf = HBaseConfiguration.create();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    int ret = ToolRunner.run(conf, new BulkLoadHFilesTool(conf), args);<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    System.exit(ret);<a name="line.1101"></a>
 <span class="sourceLineNo">1102</span>  }<a name="line.1102"></a>
 <span class="sourceLineNo">1103</span><a name="line.1103"></a>
-<span class="sourceLineNo">1104</span>  public static void main(String[] args) throws Exception {<a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    Configuration conf = HBaseConfiguration.create();<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>    int ret = ToolRunner.run(conf, new BulkLoadHFilesTool(conf), args);<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>    System.exit(ret);<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>  }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>  @Override<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>  public void disableReplication(){<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>    this.replicate = false;<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>  }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>  @Override<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>  public boolean isReplicationDisabled(){<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>    return !this.replicate;<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>  }<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>}<a name="line.1119"></a>
+<span class="sourceLineNo">1104</span>  @Override<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>  public void disableReplication(){<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>    this.replicate = false;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>  }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>  @Override<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>  public boolean isReplicationDisabled(){<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>    return !this.replicate;<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>  }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span>}<a name="line.1113"></a>
 
 
 
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html b/devapidocs/src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html
index 494125b..32993ae 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.html
@@ -133,998 +133,992 @@
 <span class="sourceLineNo">125</span>   */<a name="line.125"></a>
 <span class="sourceLineNo">126</span>  public static final String BULK_LOAD_HFILES_BY_FAMILY = "hbase.mapreduce.bulkload.by.family";<a name="line.126"></a>
 <span class="sourceLineNo">127</span><a name="line.127"></a>
-<span class="sourceLineNo">128</span>  //HDFS DelegationToken is cached and should be renewed before token expiration<a name="line.128"></a>
-<span class="sourceLineNo">129</span>  public static final String BULK_LOAD_RENEW_TOKEN_TIME_BUFFER<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    = "hbase.bulkload.renew.token.time.buffer";<a name="line.130"></a>
+<span class="sourceLineNo">128</span>  // We use a '.' prefix which is ignored when walking directory trees<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  // above. It is invalid family name.<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  static final String TMP_DIR = ".tmp";<a name="line.130"></a>
 <span class="sourceLineNo">131</span><a name="line.131"></a>
-<span class="sourceLineNo">132</span>  // We use a '.' prefix which is ignored when walking directory trees<a name="line.132"></a>
-<span class="sourceLineNo">133</span>  // above. It is invalid family name.<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  static final String TMP_DIR = ".tmp";<a name="line.134"></a>
+<span class="sourceLineNo">132</span>  private final int maxFilesPerRegionPerFamily;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>  private final boolean assignSeqIds;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  private boolean bulkLoadByFamily;<a name="line.134"></a>
 <span class="sourceLineNo">135</span><a name="line.135"></a>
-<span class="sourceLineNo">136</span>  private final int maxFilesPerRegionPerFamily;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>  private final boolean assignSeqIds;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>  private boolean bulkLoadByFamily;<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>  // Source delegation token<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  private final FsDelegationToken fsDelegationToken;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>  private final UserProvider userProvider;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>  private final int nrThreads;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  private final AtomicInteger numRetries = new AtomicInteger(0);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  private String bulkToken;<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  private List&lt;String&gt; clusterIds = new ArrayList&lt;&gt;();<a name="line.147"></a>
-<span class="sourceLineNo">148</span>  private boolean replicate = true;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>  private final long retryAheadTime;<a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>  public BulkLoadHFilesTool(Configuration conf) {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    // make a copy, just to be sure we're not overriding someone else's config<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    super(new Configuration(conf));<a name="line.153"></a>
-<span class="sourceLineNo">154</span>    // disable blockcache for tool invocation, see HBASE-10500<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    userProvider = UserProvider.instantiate(conf);<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    retryAheadTime = conf.getLong(BULK_LOAD_RENEW_TOKEN_TIME_BUFFER, 60000L);<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    fsDelegationToken = new FsDelegationToken(userProvider, "renewer", retryAheadTime);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>    assignSeqIds = conf.getBoolean(ASSIGN_SEQ_IDS, true);<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    maxFilesPerRegionPerFamily = conf.getInt(MAX_FILES_PER_REGION_PER_FAMILY, 32);<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    nrThreads = conf.getInt("hbase.loadincremental.threads.max",<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      Runtime.getRuntime().availableProcessors());<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    bulkLoadByFamily = conf.getBoolean(BULK_LOAD_HFILES_BY_FAMILY, false);<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  }<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>  // Initialize a thread pool<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  private ExecutorService createExecutorService() {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    ThreadPoolExecutor pool = new ThreadPoolExecutor(nrThreads, nrThreads, 60, TimeUnit.SECONDS,<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      new LinkedBlockingQueue&lt;&gt;(),<a name="line.169"></a>
-<span class="sourceLineNo">170</span>      new ThreadFactoryBuilder().setNameFormat("BulkLoadHFilesTool-%1$d").setDaemon(true).build());<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    pool.allowCoreThreadTimeOut(true);<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    return pool;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>  private boolean isCreateTable() {<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    return "yes".equalsIgnoreCase(getConf().get(CREATE_TABLE_CONF_KEY, "yes"));<a name="line.176"></a>
-<span class="sourceLineNo">177</span>  }<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  private boolean isSilence() {<a name="line.179"></a>
-<span class="sourceLineNo">180</span>    return "yes".equalsIgnoreCase(getConf().get(IGNORE_UNMATCHED_CF_CONF_KEY, ""));<a name="line.180"></a>
-<span class="sourceLineNo">181</span>  }<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>  private boolean isAlwaysCopyFiles() {<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    return getConf().getBoolean(ALWAYS_COPY_FILES, false);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  }<a name="line.185"></a>
+<span class="sourceLineNo">136</span>  // Source delegation token<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  private final FsDelegationToken fsDelegationToken;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>  private final UserProvider userProvider;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>  private final int nrThreads;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  private final AtomicInteger numRetries = new AtomicInteger(0);<a name="line.140"></a>
+<span class="sourceLineNo">141</span>  private String bulkToken;<a name="line.141"></a>
+<span class="sourceLineNo">142</span><a name="line.142"></a>
+<span class="sourceLineNo">143</span>  private List&lt;String&gt; clusterIds = new ArrayList&lt;&gt;();<a name="line.143"></a>
+<span class="sourceLineNo">144</span>  private boolean replicate = true;<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span>  public BulkLoadHFilesTool(Configuration conf) {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    // make a copy, just to be sure we're not overriding someone else's config<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    super(new Configuration(conf));<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    // disable blockcache for tool invocation, see HBASE-10500<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0);<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    userProvider = UserProvider.instantiate(conf);<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    fsDelegationToken = new FsDelegationToken(userProvider, "renewer");<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    assignSeqIds = conf.getBoolean(ASSIGN_SEQ_IDS, true);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    maxFilesPerRegionPerFamily = conf.getInt(MAX_FILES_PER_REGION_PER_FAMILY, 32);<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    nrThreads = conf.getInt("hbase.loadincremental.threads.max",<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      Runtime.getRuntime().availableProcessors());<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    bulkLoadByFamily = conf.getBoolean(BULK_LOAD_HFILES_BY_FAMILY, false);<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
+<span class="sourceLineNo">159</span><a name="line.159"></a>
+<span class="sourceLineNo">160</span>  // Initialize a thread pool<a name="line.160"></a>
+<span class="sourceLineNo">161</span>  private ExecutorService createExecutorService() {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    ThreadPoolExecutor pool = new ThreadPoolExecutor(nrThreads, nrThreads, 60, TimeUnit.SECONDS,<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      new LinkedBlockingQueue&lt;&gt;(),<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      new ThreadFactoryBuilder().setNameFormat("BulkLoadHFilesTool-%1$d").setDaemon(true).build());<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    pool.allowCoreThreadTimeOut(true);<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    return pool;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>  }<a name="line.167"></a>
+<span class="sourceLineNo">168</span><a name="line.168"></a>
+<span class="sourceLineNo">169</span>  private boolean isCreateTable() {<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    return "yes".equalsIgnoreCase(getConf().get(CREATE_TABLE_CONF_KEY, "yes"));<a name="line.170"></a>
+<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
+<span class="sourceLineNo">172</span><a name="line.172"></a>
+<span class="sourceLineNo">173</span>  private boolean isSilence() {<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    return "yes".equalsIgnoreCase(getConf().get(IGNORE_UNMATCHED_CF_CONF_KEY, ""));<a name="line.174"></a>
+<span class="sourceLineNo">175</span>  }<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>  private boolean isAlwaysCopyFiles() {<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    return getConf().getBoolean(ALWAYS_COPY_FILES, false);<a name="line.178"></a>
+<span class="sourceLineNo">179</span>  }<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>  private static boolean shouldCopyHFileMetaKey(byte[] key) {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    // skip encoding to keep hfile meta consistent with data block info, see HBASE-15085<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    if (Bytes.equals(key, HFileDataBlockEncoder.DATA_BLOCK_ENCODING)) {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>      return false;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    }<a name="line.185"></a>
 <span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  private static boolean shouldCopyHFileMetaKey(byte[] key) {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    // skip encoding to keep hfile meta consistent with data block info, see HBASE-15085<a name="line.188"></a>
-<span class="sourceLineNo">189</span>    if (Bytes.equals(key, HFileDataBlockEncoder.DATA_BLOCK_ENCODING)) {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>      return false;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>    }<a name="line.191"></a>
-<span class="sourceLineNo">192</span><a name="line.192"></a>
-<span class="sourceLineNo">193</span>    return !HFileInfo.isReservedFileInfoKey(key);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>  }<a name="line.194"></a>
-<span class="sourceLineNo">195</span><a name="line.195"></a>
-<span class="sourceLineNo">196</span>  /**<a name="line.196"></a>
-<span class="sourceLineNo">197</span>   * Checks whether there is any invalid family name in HFiles to be bulk loaded.<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   */<a name="line.198"></a>
-<span class="sourceLineNo">199</span>  private static void validateFamiliesInHFiles(TableDescriptor tableDesc,<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      Deque&lt;LoadQueueItem&gt; queue, boolean silence) throws IOException {<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    Set&lt;String&gt; familyNames = Arrays.stream(tableDesc.getColumnFamilies())<a name="line.201"></a>
-<span class="sourceLineNo">202</span>      .map(ColumnFamilyDescriptor::getNameAsString).collect(Collectors.toSet());<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    List&lt;String&gt; unmatchedFamilies = queue.stream().map(item -&gt; Bytes.toString(item.getFamily()))<a name="line.203"></a>
-<span class="sourceLineNo">204</span>      .filter(fn -&gt; !familyNames.contains(fn)).distinct().collect(Collectors.toList());<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    if (unmatchedFamilies.size() &gt; 0) {<a name="line.205"></a>
-<span class="sourceLineNo">206</span>      String msg =<a name="line.206"></a>
-<span class="sourceLineNo">207</span>        "Unmatched family names found: unmatched family names in HFiles to be bulkloaded: " +<a name="line.207"></a>
-<span class="sourceLineNo">208</span>          unmatchedFamilies + "; valid family names of table " + tableDesc.getTableName() +<a name="line.208"></a>
-<span class="sourceLineNo">209</span>          " are: " + familyNames;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      LOG.error(msg);<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      if (!silence) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>        throw new IOException(msg);<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      }<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>  }<a name="line.215"></a>
-<span class="sourceLineNo">216</span><a name="line.216"></a>
-<span class="sourceLineNo">217</span>  /**<a name="line.217"></a>
-<span class="sourceLineNo">218</span>   * Populate the Queue with given HFiles<a name="line.218"></a>
-<span class="sourceLineNo">219</span>   */<a name="line.219"></a>
-<span class="sourceLineNo">220</span>  private static void populateLoadQueue(Deque&lt;LoadQueueItem&gt; ret, Map&lt;byte[], List&lt;Path&gt;&gt; map) {<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    map.forEach((k, v) -&gt; v.stream().map(p -&gt; new LoadQueueItem(k, p)).forEachOrdered(ret::add));<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  }<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  private interface BulkHFileVisitor&lt;TFamily&gt; {<a name="line.224"></a>
-<span class="sourceLineNo">225</span><a name="line.225"></a>
-<span class="sourceLineNo">226</span>    TFamily bulkFamily(byte[] familyName) throws IOException;<a name="line.226"></a>
-<span class="sourceLineNo">227</span><a name="line.227"></a>
-<span class="sourceLineNo">228</span>    void bulkHFile(TFamily family, FileStatus hfileStatus) throws IOException;<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  }<a name="line.229"></a>
-<span class="sourceLineNo">230</span><a name="line.230"></a>
-<span class="sourceLineNo">231</span>  /**<a name="line.231"></a>
-<span class="sourceLineNo">232</span>   * Iterate over the bulkDir hfiles. Skip reference, HFileLink, files starting with "_". Check and<a name="line.232"></a>
-<span class="sourceLineNo">233</span>   * skip non-valid hfiles by default, or skip this validation by setting {@link #VALIDATE_HFILES}<a name="line.233"></a>
-<span class="sourceLineNo">234</span>   * to false.<a name="line.234"></a>
-<span class="sourceLineNo">235</span>   */<a name="line.235"></a>
-<span class="sourceLineNo">236</span>  private static &lt;TFamily&gt; void visitBulkHFiles(FileSystem fs, Path bulkDir,<a name="line.236"></a>
-<span class="sourceLineNo">237</span>      BulkHFileVisitor&lt;TFamily&gt; visitor, boolean validateHFile) throws IOException {<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    FileStatus[] familyDirStatuses = fs.listStatus(bulkDir);<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    for (FileStatus familyStat : familyDirStatuses) {<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      if (!familyStat.isDirectory()) {<a name="line.240"></a>
-<span class="sourceLineNo">241</span>        LOG.warn("Skipping non-directory " + familyStat.getPath());<a name="line.241"></a>
-<span class="sourceLineNo">242</span>        continue;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      }<a name="line.243"></a>
-<span class="sourceLineNo">244</span>      Path familyDir = familyStat.getPath();<a name="line.244"></a>
-<span class="sourceLineNo">245</span>      byte[] familyName = Bytes.toBytes(familyDir.getName());<a name="line.245"></a>
-<span class="sourceLineNo">246</span>      // Skip invalid family<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      try {<a name="line.247"></a>
-<span class="sourceLineNo">248</span>        ColumnFamilyDescriptorBuilder.isLegalColumnFamilyName(familyName);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>      } catch (IllegalArgumentException e) {<a name="line.249"></a>
-<span class="sourceLineNo">250</span>        LOG.warn("Skipping invalid " + familyStat.getPath());<a name="line.250"></a>
-<span class="sourceLineNo">251</span>        continue;<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      }<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      TFamily family = visitor.bulkFamily(familyName);<a name="line.253"></a>
-<span class="sourceLineNo">254</span><a name="line.254"></a>
-<span class="sourceLineNo">255</span>      FileStatus[] hfileStatuses = fs.listStatus(familyDir);<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      for (FileStatus hfileStatus : hfileStatuses) {<a name="line.256"></a>
-<span class="sourceLineNo">257</span>        if (!fs.isFile(hfileStatus.getPath())) {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>          LOG.warn("Skipping non-file " + hfileStatus);<a name="line.258"></a>
-<span class="sourceLineNo">259</span>          continue;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        }<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>        Path hfile = hfileStatus.getPath();<a name="line.262"></a>
-<span class="sourceLineNo">263</span>        // Skip "_", reference, HFileLink<a name="line.263"></a>
-<span class="sourceLineNo">264</span>        String fileName = hfile.getName();<a name="line.264"></a>
-<span class="sourceLineNo">265</span>        if (fileName.startsWith("_")) {<a name="line.265"></a>
-<span class="sourceLineNo">266</span>          continue;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        }<a name="line.267"></a>
-<span class="sourceLineNo">268</span>        if (StoreFileInfo.isReference(fileName)) {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>          LOG.warn("Skipping reference " + fileName);<a name="line.269"></a>
-<span class="sourceLineNo">270</span>          continue;<a name="line.270"></a>
-<span class="sourceLineNo">271</span>        }<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        if (HFileLink.isHFileLink(fileName)) {<a name="line.272"></a>
-<span class="sourceLineNo">273</span>          LOG.warn("Skipping HFileLink " + fileName);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>          continue;<a name="line.274"></a>
-<span class="sourceLineNo">275</span>        }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>        // Validate HFile Format if needed<a name="line.277"></a>
-<span class="sourceLineNo">278</span>        if (validateHFile) {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>          try {<a name="line.279"></a>
-<span class="sourceLineNo">280</span>            if (!HFile.isHFileFormat(fs, hfile)) {<a name="line.280"></a>
-<span class="sourceLineNo">281</span>              LOG.warn("the file " + hfile + " doesn't seems to be an hfile. skipping");<a name="line.281"></a>
-<span class="sourceLineNo">282</span>              continue;<a name="line.282"></a>
-<span class="sourceLineNo">283</span>            }<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          } catch (FileNotFoundException e) {<a name="line.284"></a>
-<span class="sourceLineNo">285</span>            LOG.warn("the file " + hfile + " was removed");<a name="line.285"></a>
-<span class="sourceLineNo">286</span>            continue;<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          }<a name="line.287"></a>
-<span class="sourceLineNo">288</span>        }<a name="line.288"></a>
-<span class="sourceLineNo">289</span><a name="line.289"></a>
-<span class="sourceLineNo">290</span>        visitor.bulkHFile(family, hfileStatus);<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      }<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    }<a name="line.292"></a>
-<span class="sourceLineNo">293</span>  }<a name="line.293"></a>
-<span class="sourceLineNo">294</span><a name="line.294"></a>
-<span class="sourceLineNo">295</span>  /**<a name="line.295"></a>
-<span class="sourceLineNo">296</span>   * Walk the given directory for all HFiles, and return a Queue containing all such files.<a name="line.296"></a>
-<span class="sourceLineNo">297</span>   */<a name="line.297"></a>
-<span class="sourceLineNo">298</span>  private static void discoverLoadQueue(Configuration conf, Deque&lt;LoadQueueItem&gt; ret, Path hfofDir,<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      boolean validateHFile) throws IOException {<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    visitBulkHFiles(hfofDir.getFileSystem(conf), hfofDir, new BulkHFileVisitor&lt;byte[]&gt;() {<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      @Override<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      public byte[] bulkFamily(final byte[] familyName) {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>        return familyName;<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      }<a name="line.304"></a>
-<span class="sourceLineNo">305</span><a name="line.305"></a>
-<span class="sourceLineNo">306</span>      @Override<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      public void bulkHFile(final byte[] family, final FileStatus hfile) {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>        long length = hfile.getLen();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>        if (length &gt; conf.getLong(HConstants.HREGION_MAX_FILESIZE,<a name="line.309"></a>
-<span class="sourceLineNo">310</span>          HConstants.DEFAULT_MAX_FILE_SIZE)) {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>          LOG.warn("Trying to bulk load hfile " + hfile.getPath() + " with size: " + length +<a name="line.311"></a>
-<span class="sourceLineNo">312</span>            " bytes can be problematic as it may lead to oversplitting.");<a name="line.312"></a>
-<span class="sourceLineNo">313</span>        }<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        ret.add(new LoadQueueItem(family, hfile.getPath()));<a name="line.314"></a>
-<span class="sourceLineNo">315</span>      }<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    }, validateHFile);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>  }<a name="line.317"></a>
-<span class="sourceLineNo">318</span><a name="line.318"></a>
-<span class="sourceLineNo">319</span>  /**<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   * Prepare a collection of {@code LoadQueueItem} from list of source hfiles contained in the<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * passed directory and validates whether the prepared queue has all the valid table column<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * families in it.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * @param map map of family to List of hfiles<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * @param tableName table to which hfiles should be loaded<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * @param queue queue which needs to be loaded into the table<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * @param silence true to ignore unmatched column families<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * @throws IOException If any I/O or network error occurred<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   */<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  public static void prepareHFileQueue(AsyncClusterConnection conn, TableName tableName,<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      Map&lt;byte[], List&lt;Path&gt;&gt; map, Deque&lt;LoadQueueItem&gt; queue, boolean silence) throws IOException {<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    populateLoadQueue(queue, map);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    validateFamiliesInHFiles(FutureUtils.get(conn.getAdmin().getDescriptor(tableName)), queue,<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      silence);<a name="line.333"></a>
-<span class="sourceLineNo">334</span>  }<a name="line.334"></a>
-<span class="sourceLineNo">335</span><a name="line.335"></a>
-<span class="sourceLineNo">336</span>  /**<a name="line.336"></a>
-<span class="sourceLineNo">337</span>   * Prepare a collection of {@code LoadQueueItem} from list of source hfiles contained in the<a name="line.337"></a>
-<span class="sourceLineNo">338</span>   * passed directory and validates whether the prepared queue has all the valid table column<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * families in it.<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   * @param hfilesDir directory containing list of hfiles to be loaded into the table<a name="line.340"></a>
-<span class="sourceLineNo">341</span>   * @param queue queue which needs to be loaded into the table<a name="line.341"></a>
-<span class="sourceLineNo">342</span>   * @param validateHFile if true hfiles will be validated for its format<a name="line.342"></a>
-<span class="sourceLineNo">343</span>   * @param silence true to ignore unmatched column families<a name="line.343"></a>
-<span class="sourceLineNo">344</span>   * @throws IOException If any I/O or network error occurred<a name="line.344"></a>
-<span class="sourceLineNo">345</span>   */<a name="line.345"></a>
-<span class="sourceLineNo">346</span>  public static void prepareHFileQueue(Configuration conf, AsyncClusterConnection conn,<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      TableName tableName, Path hfilesDir, Deque&lt;LoadQueueItem&gt; queue, boolean validateHFile,<a name="line.347"></a>
-<span class="sourceLineNo">348</span>      boolean silence) throws IOException {<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    discoverLoadQueue(conf, queue, hfilesDir, validateHFile);<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    validateFamiliesInHFiles(FutureUtils.get(conn.getAdmin().getDescriptor(tableName)), queue,<a name="line.350"></a>
-<span class="sourceLineNo">351</span>      silence);<a name="line.351"></a>
-<span class="sourceLineNo">352</span>  }<a name="line.352"></a>
-<span class="sourceLineNo">353</span><a name="line.353"></a>
-<span class="sourceLineNo">354</span>  /**<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * Used by the replication sink to load the hfiles from the source cluster. It does the following,<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   * &lt;ol&gt;<a name="line.356"></a>
-<span class="sourceLineNo">357</span>   * &lt;li&gt;{@link #groupOrSplitPhase(AsyncClusterConnection, TableName, ExecutorService, Deque, List)}<a name="line.357"></a>
-<span class="sourceLineNo">358</span>   * &lt;/li&gt;<a name="line.358"></a>
-<span class="sourceLineNo">359</span>   * &lt;li&gt;{@link #bulkLoadPhase(AsyncClusterConnection, TableName, Deque, Multimap, boolean, Map)}<a name="line.359"></a>
-<span class="sourceLineNo">360</span>   * &lt;/li&gt;<a name="line.360"></a>
-<span class="sourceLineNo">361</span>   * &lt;/ol&gt;<a name="line.361"></a>
-<span class="sourceLineNo">362</span>   * @param conn Connection to use<a name="line.362"></a>
-<span class="sourceLineNo">363</span>   * @param tableName Table to which these hfiles should be loaded to<a name="line.363"></a>
-<span class="sourceLineNo">364</span>   * @param queue {@code LoadQueueItem} has hfiles yet to be loaded<a name="line.364"></a>
-<span class="sourceLineNo">365</span>   */<a name="line.365"></a>
-<span class="sourceLineNo">366</span>  public void loadHFileQueue(AsyncClusterConnection conn, TableName tableName,<a name="line.366"></a>
-<span class="sourceLineNo">367</span>      Deque&lt;LoadQueueItem&gt; queue, boolean copyFiles) throws IOException {<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    ExecutorService pool = createExecutorService();<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    try {<a name="line.369"></a>
-<span class="sourceLineNo">370</span>      Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups = groupOrSplitPhase(conn, tableName, pool,<a name="line.370"></a>
-<span class="sourceLineNo">371</span>        queue, FutureUtils.get(conn.getRegionLocator(tableName).getStartEndKeys())).getFirst();<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      bulkLoadPhase(conn, tableName, queue, regionGroups, copyFiles, null);<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    } finally {<a name="line.373"></a>
-<span class="sourceLineNo">374</span>      pool.shutdown();<a name="line.374"></a>
-<span class="sourceLineNo">375</span>    }<a name="line.375"></a>
-<span class="sourceLineNo">376</span>  }<a name="line.376"></a>
-<span class="sourceLineNo">377</span><a name="line.377"></a>
-<span class="sourceLineNo">378</span>  /**<a name="line.378"></a>
-<span class="sourceLineNo">379</span>   * Attempts to do an atomic load of many hfiles into a region. If it fails, it returns a list of<a name="line.379"></a>
-<span class="sourceLineNo">380</span>   * hfiles that need to be retried. If it is successful it will return an empty list. NOTE: To<a name="line.380"></a>
-<span class="sourceLineNo">381</span>   * maintain row atomicity guarantees, region server side should succeed atomically and fails<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   * atomically.<a name="line.382"></a>
-<span class="sourceLineNo">383</span>   * @param conn Connection to use<a name="line.383"></a>
-<span class="sourceLineNo">384</span>   * @param tableName Table to which these hfiles should be loaded to<a name="line.384"></a>
-<span class="sourceLineNo">385</span>   * @param copyFiles whether replicate to peer cluster while bulkloading<a name="line.385"></a>
-<span class="sourceLineNo">386</span>   * @param first the start key of region<a name="line.386"></a>
-<span class="sourceLineNo">387</span>   * @param lqis hfiles should be loaded<a name="line.387"></a>
-<span class="sourceLineNo">388</span>   * @return empty list if success, list of items to retry on recoverable failure<a name="line.388"></a>
-<span class="sourceLineNo">389</span>   */<a name="line.389"></a>
-<span class="sourceLineNo">390</span>  @VisibleForTesting<a name="line.390"></a>
-<span class="sourceLineNo">391</span>  protected CompletableFuture&lt;Collection&lt;LoadQueueItem&gt;&gt; tryAtomicRegionLoad(<a name="line.391"></a>
-<span class="sourceLineNo">392</span>      final AsyncClusterConnection conn, final TableName tableName, boolean copyFiles,<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      final byte[] first, Collection&lt;LoadQueueItem&gt; lqis) {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>    List&lt;Pair&lt;byte[], String&gt;&gt; familyPaths =<a name="line.394"></a>
-<span class="sourceLineNo">395</span>        lqis.stream().map(lqi -&gt; Pair.newPair(lqi.getFamily(), lqi.getFilePath().toString()))<a name="line.395"></a>
-<span class="sourceLineNo">396</span>            .collect(Collectors.toList());<a name="line.396"></a>
-<span class="sourceLineNo">397</span>    CompletableFuture&lt;Collection&lt;LoadQueueItem&gt;&gt; future = new CompletableFuture&lt;&gt;();<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    FutureUtils<a name="line.398"></a>
-<span class="sourceLineNo">399</span>        .addListener(<a name="line.399"></a>
-<span class="sourceLineNo">400</span>          conn.bulkLoad(tableName, familyPaths, first, assignSeqIds,<a name="line.400"></a>
-<span class="sourceLineNo">401</span>            fsDelegationToken.getUserToken(), bulkToken, copyFiles, clusterIds, replicate),<a name="line.401"></a>
-<span class="sourceLineNo">402</span>          (loaded, error) -&gt; {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>            if (error != null) {<a name="line.403"></a>
-<span class="sourceLineNo">404</span>              LOG.error("Encountered unrecoverable error from region server", error);<a name="line.404"></a>
-<span class="sourceLineNo">405</span>              if (getConf().getBoolean(RETRY_ON_IO_EXCEPTION, false)<a name="line.405"></a>
-<span class="sourceLineNo">406</span>                  &amp;&amp; numRetries.get() &lt; getConf().getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.406"></a>
-<span class="sourceLineNo">407</span>                    HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER)) {<a name="line.407"></a>
-<span class="sourceLineNo">408</span>                LOG.warn("Will attempt to retry loading failed HFiles. Retry #"<a name="line.408"></a>
-<span class="sourceLineNo">409</span>                    + numRetries.incrementAndGet());<a name="line.409"></a>
-<span class="sourceLineNo">410</span>                // return lqi's to retry<a name="line.410"></a>
-<span class="sourceLineNo">411</span>                future.complete(lqis);<a name="line.411"></a>
-<span class="sourceLineNo">412</span>              } else {<a name="line.412"></a>
-<span class="sourceLineNo">413</span>                LOG.error(RETRY_ON_IO_EXCEPTION<a name="line.413"></a>
-<span class="sourceLineNo">414</span>                    + " is disabled or we have reached retry limit. Unable to recover");<a name="line.414"></a>
-<span class="sourceLineNo">415</span>                future.completeExceptionally(error);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>              }<a name="line.416"></a>
-<span class="sourceLineNo">417</span>            } else {<a name="line.417"></a>
-<span class="sourceLineNo">418</span>              if (loaded) {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>                future.complete(Collections.emptyList());<a name="line.419"></a>
-<span class="sourceLineNo">420</span>              } else {<a name="line.420"></a>
-<span class="sourceLineNo">421</span>                LOG.warn("Attempt to bulk load region containing " + Bytes.toStringBinary(first)<a name="line.421"></a>
-<span class="sourceLineNo">422</span>                    + " into table " + tableName + " with files " + lqis<a name="line.422"></a>
-<span class="sourceLineNo">423</span>                    + " failed.  This is recoverable and they will be retried.");<a name="line.423"></a>
-<span class="sourceLineNo">424</span>                // return lqi's to retry<a name="line.424"></a>
-<span class="sourceLineNo">425</span>                future.complete(lqis);<a name="line.425"></a>
-<span class="sourceLineNo">426</span>              }<a name="line.426"></a>
-<span class="sourceLineNo">427</span>            }<a name="line.427"></a>
-<span class="sourceLineNo">428</span>          });<a name="line.428"></a>
-<span class="sourceLineNo">429</span>    return future;<a name="line.429"></a>
-<span class="sourceLineNo">430</span>  }<a name="line.430"></a>
-<span class="sourceLineNo">431</span><a name="line.431"></a>
-<span class="sourceLineNo">432</span>  /**<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   * This takes the LQI's grouped by likely regions and attempts to bulk load them. Any failures are<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   * re-queued for another pass with the groupOrSplitPhase.<a name="line.434"></a>
-<span class="sourceLineNo">435</span>   * &lt;p/&gt;<a name="line.435"></a>
-<span class="sourceLineNo">436</span>   * protected for testing.<a name="line.436"></a>
-<span class="sourceLineNo">437</span>   */<a name="line.437"></a>
-<span class="sourceLineNo">438</span>  @VisibleForTesting<a name="line.438"></a>
-<span class="sourceLineNo">439</span>  protected void bulkLoadPhase(AsyncClusterConnection conn, TableName tableName,<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      Deque&lt;LoadQueueItem&gt; queue, Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups,<a name="line.440"></a>
-<span class="sourceLineNo">441</span>      boolean copyFiles, Map&lt;LoadQueueItem, ByteBuffer&gt; item2RegionMap) throws IOException {<a name="line.441"></a>
-<span class="sourceLineNo">442</span>    // atomically bulk load the groups.<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    List&lt;Future&lt;Collection&lt;LoadQueueItem&gt;&gt;&gt; loadingFutures = new ArrayList&lt;&gt;();<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    for (Entry&lt;ByteBuffer, ? extends Collection&lt;LoadQueueItem&gt;&gt; entry : regionGroups.asMap()<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        .entrySet()) {<a name="line.445"></a>
-<span class="sourceLineNo">446</span>      byte[] first = entry.getKey().array();<a name="line.446"></a>
-<span class="sourceLineNo">447</span>      final Collection&lt;LoadQueueItem&gt; lqis = entry.getValue();<a name="line.447"></a>
-<span class="sourceLineNo">448</span>      if (bulkLoadByFamily) {<a name="line.448"></a>
-<span class="sourceLineNo">449</span>        groupByFamilies(lqis).values().forEach(familyQueue -&gt; loadingFutures<a name="line.449"></a>
-<span class="sourceLineNo">450</span>            .add(tryAtomicRegionLoad(conn, tableName, copyFiles, first, familyQueue)));<a name="line.450"></a>
-<span class="sourceLineNo">451</span>      } else {<a name="line.451"></a>
-<span class="sourceLineNo">452</span>        loadingFutures.add(tryAtomicRegionLoad(conn, tableName, copyFiles, first, lqis));<a name="line.452"></a>
-<span class="sourceLineNo">453</span>      }<a name="line.453"></a>
-<span class="sourceLineNo">454</span>      if (item2RegionMap != null) {<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        for (LoadQueueItem lqi : lqis) {<a name="line.455"></a>
-<span class="sourceLineNo">456</span>          item2RegionMap.put(lqi, entry.getKey());<a name="line.456"></a>
-<span class="sourceLineNo">457</span>        }<a name="line.457"></a>
-<span class="sourceLineNo">458</span>      }<a name="line.458"></a>
-<span class="sourceLineNo">459</span>    }<a name="line.459"></a>
-<span class="sourceLineNo">460</span><a name="line.460"></a>
-<span class="sourceLineNo">461</span>    // get all the results.<a name="line.461"></a>
-<span class="sourceLineNo">462</span>    for (Future&lt;Collection&lt;LoadQueueItem&gt;&gt; future : loadingFutures) {<a name="line.462"></a>
-<span class="sourceLineNo">463</span>      try {<a name="line.463"></a>
-<span class="sourceLineNo">464</span>        Collection&lt;LoadQueueItem&gt; toRetry = future.get();<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>        if (item2RegionMap != null) {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>          for (LoadQueueItem lqi : toRetry) {<a name="line.467"></a>
-<span class="sourceLineNo">468</span>            item2RegionMap.remove(lqi);<a name="line.468"></a>
-<span class="sourceLineNo">469</span>          }<a name="line.469"></a>
-<span class="sourceLineNo">470</span>        }<a name="line.470"></a>
-<span class="sourceLineNo">471</span>        // LQIs that are requeued to be regrouped.<a name="line.471"></a>
-<span class="sourceLineNo">472</span>        queue.addAll(toRetry);<a name="line.472"></a>
-<span class="sourceLineNo">473</span>      } catch (ExecutionException e1) {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>        Throwable t = e1.getCause();<a name="line.474"></a>
-<span class="sourceLineNo">475</span>        if (t instanceof IOException) {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>          // At this point something unrecoverable has happened.<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          // TODO Implement bulk load recovery<a name="line.477"></a>
-<span class="sourceLineNo">478</span>          throw new IOException("BulkLoad encountered an unrecoverable problem", t);<a name="line.478"></a>
-<span class="sourceLineNo">479</span>        }<a name="line.479"></a>
-<span class="sourceLineNo">480</span>        LOG.error("Unexpected execution exception during bulk load", e1);<a name="line.480"></a>
-<span class="sourceLineNo">481</span>        throw new IllegalStateException(t);<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      } catch (InterruptedException e1) {<a name="line.482"></a>
-<span class="sourceLineNo">483</span>        LOG.error("Unexpected interrupted exception during bulk load", e1);<a name="line.483"></a>
-<span class="sourceLineNo">484</span>        throw (InterruptedIOException) new InterruptedIOException().initCause(e1);<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      }<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>  }<a name="line.487"></a>
-<span class="sourceLineNo">488</span><a name="line.488"></a>
-<span class="sourceLineNo">489</span>  private Map&lt;byte[], Collection&lt;LoadQueueItem&gt;&gt;<a name="line.489"></a>
-<span class="sourceLineNo">490</span>      groupByFamilies(Collection&lt;LoadQueueItem&gt; itemsInRegion) {<a name="line.490"></a>
-<span class="sourceLineNo">491</span>    Map&lt;byte[], Collection&lt;LoadQueueItem&gt;&gt; families2Queue = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.491"></a>
-<span class="sourceLineNo">492</span>    itemsInRegion.forEach(item -&gt; families2Queue<a name="line.492"></a>
-<span class="sourceLineNo">493</span>        .computeIfAbsent(item.getFamily(), queue -&gt; new ArrayList&lt;&gt;()).add(item));<a name="line.493"></a>
-<span class="sourceLineNo">494</span>    return families2Queue;<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  }<a name="line.495"></a>
-<span class="sourceLineNo">496</span><a name="line.496"></a>
-<span class="sourceLineNo">497</span>  private boolean checkHFilesCountPerRegionPerFamily(<a name="line.497"></a>
-<span class="sourceLineNo">498</span>      final Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups) {<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    for (Map.Entry&lt;ByteBuffer, Collection&lt;LoadQueueItem&gt;&gt; e : regionGroups.asMap().entrySet()) {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      Map&lt;byte[], MutableInt&gt; filesMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.500"></a>
-<span class="sourceLineNo">501</span>      for (LoadQueueItem lqi : e.getValue()) {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>        MutableInt count = filesMap.computeIfAbsent(lqi.getFamily(), k -&gt; new MutableInt());<a name="line.502"></a>
-<span class="sourceLineNo">503</span>        count.increment();<a name="line.503"></a>
-<span class="sourceLineNo">504</span>        if (count.intValue() &gt; maxFilesPerRegionPerFamily) {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>          LOG.error("Trying to load more than " + maxFilesPerRegionPerFamily +<a name="line.505"></a>
-<span class="sourceLineNo">506</span>            " hfiles to family " + Bytes.toStringBinary(lqi.getFamily()) +<a name="line.506"></a>
-<span class="sourceLineNo">507</span>            " of region with start key " + Bytes.toStringBinary(e.getKey()));<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          return false;<a name="line.508"></a>
-<span class="sourceLineNo">509</span>        }<a name="line.509"></a>
-<span class="sourceLineNo">510</span>      }<a name="line.510"></a>
-<span class="sourceLineNo">511</span>    }<a name="line.511"></a>
-<span class="sourceLineNo">512</span>    return true;<a name="line.512"></a>
-<span class="sourceLineNo">513</span>  }<a name="line.513"></a>
-<span class="sourceLineNo">514</span><a name="line.514"></a>
-<span class="sourceLineNo">515</span>  /**<a name="line.515"></a>
-<span class="sourceLineNo">516</span>   * @param conn the HBase cluster connection<a name="line.516"></a>
-<span class="sourceLineNo">517</span>   * @param tableName the table name of the table to load into<a name="line.517"></a>
-<span class="sourceLineNo">518</span>   * @param pool the ExecutorService<a name="line.518"></a>
-<span class="sourceLineNo">519</span>   * @param queue the queue for LoadQueueItem<a name="line.519"></a>
-<span class="sourceLineNo">520</span>   * @param startEndKeys start and end keys<a name="line.520"></a>
-<span class="sourceLineNo">521</span>   * @return A map that groups LQI by likely bulk load region targets and Set of missing hfiles.<a name="line.521"></a>
-<span class="sourceLineNo">522</span>   */<a name="line.522"></a>
-<span class="sourceLineNo">523</span>  private Pair&lt;Multimap&lt;ByteBuffer, LoadQueueItem&gt;, Set&lt;String&gt;&gt; groupOrSplitPhase(<a name="line.523"></a>
-<span class="sourceLineNo">524</span>      AsyncClusterConnection conn, TableName tableName, ExecutorService pool,<a name="line.524"></a>
-<span class="sourceLineNo">525</span>      Deque&lt;LoadQueueItem&gt; queue, List&lt;Pair&lt;byte[], byte[]&gt;&gt; startEndKeys) throws IOException {<a name="line.525"></a>
-<span class="sourceLineNo">526</span>    // &lt;region start key, LQI&gt; need synchronized only within this scope of this<a name="line.526"></a>
-<span class="sourceLineNo">527</span>    // phase because of the puts that happen in futures.<a name="line.527"></a>
-<span class="sourceLineNo">528</span>    Multimap&lt;ByteBuffer, LoadQueueItem&gt; rgs = HashMultimap.create();<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    final Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups = Multimaps.synchronizedMultimap(rgs);<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    Set&lt;String&gt; missingHFiles = new HashSet&lt;&gt;();<a name="line.530"></a>
-<span class="sourceLineNo">531</span>    Pair&lt;Multimap&lt;ByteBuffer, LoadQueueItem&gt;, Set&lt;String&gt;&gt; pair =<a name="line.531"></a>
-<span class="sourceLineNo">532</span>      new Pair&lt;&gt;(regionGroups, missingHFiles);<a name="line.532"></a>
-<span class="sourceLineNo">533</span><a name="line.533"></a>
-<span class="sourceLineNo">534</span>    // drain LQIs and figure out bulk load groups<a name="line.534"></a>
-<span class="sourceLineNo">535</span>    Set&lt;Future&lt;Pair&lt;List&lt;LoadQueueItem&gt;, String&gt;&gt;&gt; splittingFutures = new HashSet&lt;&gt;();<a name="line.535"></a>
-<span class="sourceLineNo">536</span>    while (!queue.isEmpty()) {<a name="line.536"></a>
-<span class="sourceLineNo">537</span>      final LoadQueueItem item = queue.remove();<a name="line.537"></a>
-<span class="sourceLineNo">538</span><a name="line.538"></a>
-<span class="sourceLineNo">539</span>      final Callable&lt;Pair&lt;List&lt;LoadQueueItem&gt;, String&gt;&gt; call =<a name="line.539"></a>
-<span class="sourceLineNo">540</span>        () -&gt; groupOrSplit(conn, tableName, regionGroups, item, startEndKeys);<a name="line.540"></a>
-<span class="sourceLineNo">541</span>      splittingFutures.add(pool.submit(call));<a name="line.541"></a>
-<span class="sourceLineNo">542</span>    }<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    // get all the results. All grouping and splitting must finish before<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    // we can attempt the atomic loads.<a name="line.544"></a>
-<span class="sourceLineNo">545</span>    for (Future&lt;Pair&lt;List&lt;LoadQueueItem&gt;, String&gt;&gt; lqis : splittingFutures) {<a name="line.545"></a>
-<span class="sourceLineNo">546</span>      try {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>        Pair&lt;List&lt;LoadQueueItem&gt;, String&gt; splits = lqis.get();<a name="line.547"></a>
-<span class="sourceLineNo">548</span>        if (splits != null) {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>          if (splits.getFirst() != null) {<a name="line.549"></a>
-<span class="sourceLineNo">550</span>            queue.addAll(splits.getFirst());<a name="line.550"></a>
-<span class="sourceLineNo">551</span>          } else {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>            missingHFiles.add(splits.getSecond());<a name="line.552"></a>
-<span class="sourceLineNo">553</span>          }<a name="line.553"></a>
+<span class="sourceLineNo">187</span>    return !HFileInfo.isReservedFileInfoKey(key);<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  }<a name="line.188"></a>
+<span class="sourceLineNo">189</span><a name="line.189"></a>
+<span class="sourceLineNo">190</span>  /**<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * Checks whether there is any invalid family name in HFiles to be bulk loaded.<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   */<a name="line.192"></a>
+<span class="sourceLineNo">193</span>  private static void validateFamiliesInHFiles(TableDescriptor tableDesc,<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      Deque&lt;LoadQueueItem&gt; queue, boolean silence) throws IOException {<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    Set&lt;String&gt; familyNames = Arrays.stream(tableDesc.getColumnFamilies())<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      .map(ColumnFamilyDescriptor::getNameAsString).collect(Collectors.toSet());<a name="line.196"></a>
+<span class="sourceLineNo">197</span>    List&lt;String&gt; unmatchedFamilies = queue.stream().map(item -&gt; Bytes.toString(item.getFamily()))<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      .filter(fn -&gt; !familyNames.contains(fn)).distinct().collect(Collectors.toList());<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    if (unmatchedFamilies.size() &gt; 0) {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      String msg =<a name="line.200"></a>
+<span class="sourceLineNo">201</span>        "Unmatched family names found: unmatched family names in HFiles to be bulkloaded: " +<a name="line.201"></a>
+<span class="sourceLineNo">202</span>          unmatchedFamilies + "; valid family names of table " + tableDesc.getTableName() +<a name="line.202"></a>
+<span class="sourceLineNo">203</span>          " are: " + familyNames;<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      LOG.error(msg);<a name="line.204"></a>
+<span class="sourceLineNo">205</span>      if (!silence) {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>        throw new IOException(msg);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      }<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    }<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  }<a name="line.209"></a>
+<span class="sourceLineNo">210</span><a name="line.210"></a>
+<span class="sourceLineNo">211</span>  /**<a name="line.211"></a>
+<span class="sourceLineNo">212</span>   * Populate the Queue with given HFiles<a name="line.212"></a>
+<span class="sourceLineNo">213</span>   */<a name="line.213"></a>
+<span class="sourceLineNo">214</span>  private static void populateLoadQueue(Deque&lt;LoadQueueItem&gt; ret, Map&lt;byte[], List&lt;Path&gt;&gt; map) {<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    map.forEach((k, v) -&gt; v.stream().map(p -&gt; new LoadQueueItem(k, p)).forEachOrdered(ret::add));<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  }<a name="line.216"></a>
+<span class="sourceLineNo">217</span><a name="line.217"></a>
+<span class="sourceLineNo">218</span>  private interface BulkHFileVisitor&lt;TFamily&gt; {<a name="line.218"></a>
+<span class="sourceLineNo">219</span><a name="line.219"></a>
+<span class="sourceLineNo">220</span>    TFamily bulkFamily(byte[] familyName) throws IOException;<a name="line.220"></a>
+<span class="sourceLineNo">221</span><a name="line.221"></a>
+<span class="sourceLineNo">222</span>    void bulkHFile(TFamily family, FileStatus hfileStatus) throws IOException;<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  }<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>  /**<a name="line.225"></a>
+<span class="sourceLineNo">226</span>   * Iterate over the bulkDir hfiles. Skip reference, HFileLink, files starting with "_". Check and<a name="line.226"></a>
+<span class="sourceLineNo">227</span>   * skip non-valid hfiles by default, or skip this validation by setting {@link #VALIDATE_HFILES}<a name="line.227"></a>
+<span class="sourceLineNo">228</span>   * to false.<a name="line.228"></a>
+<span class="sourceLineNo">229</span>   */<a name="line.229"></a>
+<span class="sourceLineNo">230</span>  private static &lt;TFamily&gt; void visitBulkHFiles(FileSystem fs, Path bulkDir,<a name="line.230"></a>
+<span class="sourceLineNo">231</span>      BulkHFileVisitor&lt;TFamily&gt; visitor, boolean validateHFile) throws IOException {<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    FileStatus[] familyDirStatuses = fs.listStatus(bulkDir);<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    for (FileStatus familyStat : familyDirStatuses) {<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      if (!familyStat.isDirectory()) {<a name="line.234"></a>
+<span class="sourceLineNo">235</span>        LOG.warn("Skipping non-directory " + familyStat.getPath());<a name="line.235"></a>
+<span class="sourceLineNo">236</span>        continue;<a name="line.236"></a>
+<span class="sourceLineNo">237</span>      }<a name="line.237"></a>
+<span class="sourceLineNo">238</span>      Path familyDir = familyStat.getPath();<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      byte[] familyName = Bytes.toBytes(familyDir.getName());<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      // Skip invalid family<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      try {<a name="line.241"></a>
+<span class="sourceLineNo">242</span>        ColumnFamilyDescriptorBuilder.isLegalColumnFamilyName(familyName);<a name="line.242"></a>
+<span class="sourceLineNo">243</span>      } catch (IllegalArgumentException e) {<a name="line.243"></a>
+<span class="sourceLineNo">244</span>        LOG.warn("Skipping invalid " + familyStat.getPath());<a name="line.244"></a>
+<span class="sourceLineNo">245</span>        continue;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>      }<a name="line.246"></a>
+<span class="sourceLineNo">247</span>      TFamily family = visitor.bulkFamily(familyName);<a name="line.247"></a>
+<span class="sourceLineNo">248</span><a name="line.248"></a>
+<span class="sourceLineNo">249</span>      FileStatus[] hfileStatuses = fs.listStatus(familyDir);<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      for (FileStatus hfileStatus : hfileStatuses) {<a name="line.250"></a>
+<span class="sourceLineNo">251</span>        if (!fs.isFile(hfileStatus.getPath())) {<a name="line.251"></a>
+<span class="sourceLineNo">252</span>          LOG.warn("Skipping non-file " + hfileStatus);<a name="line.252"></a>
+<span class="sourceLineNo">253</span>          continue;<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        }<a name="line.254"></a>
+<span class="sourceLineNo">255</span><a name="line.255"></a>
+<span class="sourceLineNo">256</span>        Path hfile = hfileStatus.getPath();<a name="line.256"></a>
+<span class="sourceLineNo">257</span>        // Skip "_", reference, HFileLink<a name="line.257"></a>
+<span class="sourceLineNo">258</span>        String fileName = hfile.getName();<a name="line.258"></a>
+<span class="sourceLineNo">259</span>        if (fileName.startsWith("_")) {<a name="line.259"></a>
+<span class="sourceLineNo">260</span>          continue;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        }<a name="line.261"></a>
+<span class="sourceLineNo">262</span>        if (StoreFileInfo.isReference(fileName)) {<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          LOG.warn("Skipping reference " + fileName);<a name="line.263"></a>
+<span class="sourceLineNo">264</span>          continue;<a name="line.264"></a>
+<span class="sourceLineNo">265</span>        }<a name="line.265"></a>
+<span class="sourceLineNo">266</span>        if (HFileLink.isHFileLink(fileName)) {<a name="line.266"></a>
+<span class="sourceLineNo">267</span>          LOG.warn("Skipping HFileLink " + fileName);<a name="line.267"></a>
+<span class="sourceLineNo">268</span>          continue;<a name="line.268"></a>
+<span class="sourceLineNo">269</span>        }<a name="line.269"></a>
+<span class="sourceLineNo">270</span><a name="line.270"></a>
+<span class="sourceLineNo">271</span>        // Validate HFile Format if needed<a name="line.271"></a>
+<span class="sourceLineNo">272</span>        if (validateHFile) {<a name="line.272"></a>
+<span class="sourceLineNo">273</span>          try {<a name="line.273"></a>
+<span class="sourceLineNo">274</span>            if (!HFile.isHFileFormat(fs, hfile)) {<a name="line.274"></a>
+<span class="sourceLineNo">275</span>              LOG.warn("the file " + hfile + " doesn't seems to be an hfile. skipping");<a name="line.275"></a>
+<span class="sourceLineNo">276</span>              continue;<a name="line.276"></a>
+<span class="sourceLineNo">277</span>            }<a name="line.277"></a>
+<span class="sourceLineNo">278</span>          } catch (FileNotFoundException e) {<a name="line.278"></a>
+<span class="sourceLineNo">279</span>            LOG.warn("the file " + hfile + " was removed");<a name="line.279"></a>
+<span class="sourceLineNo">280</span>            continue;<a name="line.280"></a>
+<span class="sourceLineNo">281</span>          }<a name="line.281"></a>
+<span class="sourceLineNo">282</span>        }<a name="line.282"></a>
+<span class="sourceLineNo">283</span><a name="line.283"></a>
+<span class="sourceLineNo">284</span>        visitor.bulkHFile(family, hfileStatus);<a name="line.284"></a>
+<span class="sourceLineNo">285</span>      }<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    }<a name="line.286"></a>
+<span class="sourceLineNo">287</span>  }<a name="line.287"></a>
+<span class="sourceLineNo">288</span><a name="line.288"></a>
+<span class="sourceLineNo">289</span>  /**<a name="line.289"></a>
+<span class="sourceLineNo">290</span>   * Walk the given directory for all HFiles, and return a Queue containing all such files.<a name="line.290"></a>
+<span class="sourceLineNo">291</span>   */<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private static void discoverLoadQueue(Configuration conf, Deque&lt;LoadQueueItem&gt; ret, Path hfofDir,<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      boolean validateHFile) throws IOException {<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    visitBulkHFiles(hfofDir.getFileSystem(conf), hfofDir, new BulkHFileVisitor&lt;byte[]&gt;() {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      @Override<a name="line.295"></a>
+<span class="sourceLineNo">296</span>      public byte[] bulkFamily(final byte[] familyName) {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>        return familyName;<a name="line.297"></a>
+<span class="sourceLineNo">298</span>      }<a name="line.298"></a>
+<span class="sourceLineNo">299</span><a name="line.299"></a>
+<span class="sourceLineNo">300</span>      @Override<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      public void bulkHFile(final byte[] family, final FileStatus hfile) {<a name="line.301"></a>
+<span class="sourceLineNo">302</span>        long length = hfile.getLen();<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        if (length &gt; conf.getLong(HConstants.HREGION_MAX_FILESIZE,<a name="line.303"></a>
+<span class="sourceLineNo">304</span>          HConstants.DEFAULT_MAX_FILE_SIZE)) {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>          LOG.warn("Trying to bulk load hfile " + hfile.getPath() + " with size: " + length +<a name="line.305"></a>
+<span class="sourceLineNo">306</span>            " bytes can be problematic as it may lead to oversplitting.");<a name="line.306"></a>
+<span class="sourceLineNo">307</span>        }<a name="line.307"></a>
+<span class="sourceLineNo">308</span>        ret.add(new LoadQueueItem(family, hfile.getPath()));<a name="line.308"></a>
+<span class="sourceLineNo">309</span>      }<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    }, validateHFile);<a name="line.310"></a>
+<span class="sourceLineNo">311</span>  }<a name="line.311"></a>
+<span class="sourceLineNo">312</span><a name="line.312"></a>
+<span class="sourceLineNo">313</span>  /**<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   * Prepare a collection of {@code LoadQueueItem} from list of source hfiles contained in the<a name="line.314"></a>
+<span class="sourceLineNo">315</span>   * passed directory and validates whether the prepared queue has all the valid table column<a name="line.315"></a>
+<span class="sourceLineNo">316</span>   * families in it.<a name="line.316"></a>
+<span class="sourceLineNo">317</span>   * @param map map of family to List of hfiles<a name="line.317"></a>
+<span class="sourceLineNo">318</span>   * @param tableName table to which hfiles should be loaded<a name="line.318"></a>
+<span class="sourceLineNo">319</span>   * @param queue queue which needs to be loaded into the table<a name="line.319"></a>
+<span class="sourceLineNo">320</span>   * @param silence true to ignore unmatched column families<a name="line.320"></a>
+<span class="sourceLineNo">321</span>   * @throws IOException If any I/O or network error occurred<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   */<a name="line.322"></a>
+<span class="sourceLineNo">323</span>  public static void prepareHFileQueue(AsyncClusterConnection conn, TableName tableName,<a name="line.323"></a>
+<span class="sourceLineNo">324</span>      Map&lt;byte[], List&lt;Path&gt;&gt; map, Deque&lt;LoadQueueItem&gt; queue, boolean silence) throws IOException {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    populateLoadQueue(queue, map);<a name="line.325"></a>
+<span class="sourceLineNo">326</span>    validateFamiliesInHFiles(FutureUtils.get(conn.getAdmin().getDescriptor(tableName)), queue,<a name="line.326"></a>
+<span class="sourceLineNo">327</span>      silence);<a name="line.327"></a>
+<span class="sourceLineNo">328</span>  }<a name="line.328"></a>
+<span class="sourceLineNo">329</span><a name="line.329"></a>
+<span class="sourceLineNo">330</span>  /**<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   * Prepare a collection of {@code LoadQueueItem} from list of source hfiles contained in the<a name="line.331"></a>
+<span class="sourceLineNo">332</span>   * passed directory and validates whether the prepared queue has all the valid table column<a name="line.332"></a>
+<span class="sourceLineNo">333</span>   * families in it.<a name="line.333"></a>
+<span class="sourceLineNo">334</span>   * @param hfilesDir directory containing list of hfiles to be loaded into the table<a name="line.334"></a>
+<span class="sourceLineNo">335</span>   * @param queue queue which needs to be loaded into the table<a name="line.335"></a>
+<span class="sourceLineNo">336</span>   * @param validateHFile if true hfiles will be validated for its format<a name="line.336"></a>
+<span class="sourceLineNo">337</span>   * @param silence true to ignore unmatched column families<a name="line.337"></a>
+<span class="sourceLineNo">338</span>   * @throws IOException If any I/O or network error occurred<a name="line.338"></a>
+<span class="sourceLineNo">339</span>   */<a name="line.339"></a>
+<span class="sourceLineNo">340</span>  public static void prepareHFileQueue(Configuration conf, AsyncClusterConnection conn,<a name="line.340"></a>
+<span class="sourceLineNo">341</span>      TableName tableName, Path hfilesDir, Deque&lt;LoadQueueItem&gt; queue, boolean validateHFile,<a name="line.341"></a>
+<span class="sourceLineNo">342</span>      boolean silence) throws IOException {<a name="line.342"></a>
+<span class="sourceLineNo">343</span>    discoverLoadQueue(conf, queue, hfilesDir, validateHFile);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    validateFamiliesInHFiles(FutureUtils.get(conn.getAdmin().getDescriptor(tableName)), queue,<a name="line.344"></a>
+<span class="sourceLineNo">345</span>      silence);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>  }<a name="line.346"></a>
+<span class="sourceLineNo">347</span><a name="line.347"></a>
+<span class="sourceLineNo">348</span>  /**<a name="line.348"></a>
+<span class="sourceLineNo">349</span>   * Used by the replication sink to load the hfiles from the source cluster. It does the following,<a name="line.349"></a>
+<span class="sourceLineNo">350</span>   * &lt;ol&gt;<a name="line.350"></a>
+<span class="sourceLineNo">351</span>   * &lt;li&gt;{@link #groupOrSplitPhase(AsyncClusterConnection, TableName, ExecutorService, Deque, List)}<a name="line.351"></a>
+<span class="sourceLineNo">352</span>   * &lt;/li&gt;<a name="line.352"></a>
+<span class="sourceLineNo">353</span>   * &lt;li&gt;{@link #bulkLoadPhase(AsyncClusterConnection, TableName, Deque, Multimap, boolean, Map)}<a name="line.353"></a>
+<span class="sourceLineNo">354</span>   * &lt;/li&gt;<a name="line.354"></a>
+<span class="sourceLineNo">355</span>   * &lt;/ol&gt;<a name="line.355"></a>
+<span class="sourceLineNo">356</span>   * @param conn Connection to use<a name="line.356"></a>
+<span class="sourceLineNo">357</span>   * @param tableName Table to which these hfiles should be loaded to<a name="line.357"></a>
+<span class="sourceLineNo">358</span>   * @param queue {@code LoadQueueItem} has hfiles yet to be loaded<a name="line.358"></a>
+<span class="sourceLineNo">359</span>   */<a name="line.359"></a>
+<span class="sourceLineNo">360</span>  public void loadHFileQueue(AsyncClusterConnection conn, TableName tableName,<a name="line.360"></a>
+<span class="sourceLineNo">361</span>      Deque&lt;LoadQueueItem&gt; queue, boolean copyFiles) throws IOException {<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    ExecutorService pool = createExecutorService();<a name="line.362"></a>
+<span class="sourceLineNo">363</span>    try {<a name="line.363"></a>
+<span class="sourceLineNo">364</span>      Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups = groupOrSplitPhase(conn, tableName, pool,<a name="line.364"></a>
+<span class="sourceLineNo">365</span>        queue, FutureUtils.get(conn.getRegionLocator(tableName).getStartEndKeys())).getFirst();<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      bulkLoadPhase(conn, tableName, queue, regionGroups, copyFiles, null);<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    } finally {<a name="line.367"></a>
+<span class="sourceLineNo">368</span>      pool.shutdown();<a name="line.368"></a>
+<span class="sourceLineNo">369</span>    }<a name="line.369"></a>
+<span class="sourceLineNo">370</span>  }<a name="line.370"></a>
+<span class="sourceLineNo">371</span><a name="line.371"></a>
+<span class="sourceLineNo">372</span>  /**<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   * Attempts to do an atomic load of many hfiles into a region. If it fails, it returns a list of<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   * hfiles that need to be retried. If it is successful it will return an empty list. NOTE: To<a name="line.374"></a>
+<span class="sourceLineNo">375</span>   * maintain row atomicity guarantees, region server side should succeed atomically and fails<a name="line.375"></a>
+<span class="sourceLineNo">376</span>   * atomically.<a name="line.376"></a>
+<span class="sourceLineNo">377</span>   * @param conn Connection to use<a name="line.377"></a>
+<span class="sourceLineNo">378</span>   * @param tableName Table to which these hfiles should be loaded to<a name="line.378"></a>
+<span class="sourceLineNo">379</span>   * @param copyFiles whether replicate to peer cluster while bulkloading<a name="line.379"></a>
+<span class="sourceLineNo">380</span>   * @param first the start key of region<a name="line.380"></a>
+<span class="sourceLineNo">381</span>   * @param lqis hfiles should be loaded<a name="line.381"></a>
+<span class="sourceLineNo">382</span>   * @return empty list if success, list of items to retry on recoverable failure<a name="line.382"></a>
+<span class="sourceLineNo">383</span>   */<a name="line.383"></a>
+<span class="sourceLineNo">384</span>  @VisibleForTesting<a name="line.384"></a>
+<span class="sourceLineNo">385</span>  protected CompletableFuture&lt;Collection&lt;LoadQueueItem&gt;&gt; tryAtomicRegionLoad(<a name="line.385"></a>
+<span class="sourceLineNo">386</span>      final AsyncClusterConnection conn, final TableName tableName, boolean copyFiles,<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      final byte[] first, Collection&lt;LoadQueueItem&gt; lqis) {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>    List&lt;Pair&lt;byte[], String&gt;&gt; familyPaths =<a name="line.388"></a>
+<span class="sourceLineNo">389</span>        lqis.stream().map(lqi -&gt; Pair.newPair(lqi.getFamily(), lqi.getFilePath().toString()))<a name="line.389"></a>
+<span class="sourceLineNo">390</span>            .collect(Collectors.toList());<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    CompletableFuture&lt;Collection&lt;LoadQueueItem&gt;&gt; future = new CompletableFuture&lt;&gt;();<a name="line.391"></a>
+<span class="sourceLineNo">392</span>    FutureUtils<a name="line.392"></a>
+<span class="sourceLineNo">393</span>        .addListener(<a name="line.393"></a>
+<span class="sourceLineNo">394</span>          conn.bulkLoad(tableName, familyPaths, first, assignSeqIds,<a name="line.394"></a>
+<span class="sourceLineNo">395</span>            fsDelegationToken.getUserToken(), bulkToken, copyFiles, clusterIds, replicate),<a name="line.395"></a>
+<span class="sourceLineNo">396</span>          (loaded, error) -&gt; {<a name="line.396"></a>
+<span class="sourceLineNo">397</span>            if (error != null) {<a name="line.397"></a>
+<span class="sourceLineNo">398</span>              LOG.error("Encountered unrecoverable error from region server", error);<a name="line.398"></a>
+<span class="sourceLineNo">399</span>              if (getConf().getBoolean(RETRY_ON_IO_EXCEPTION, false)<a name="line.399"></a>
+<span class="sourceLineNo">400</span>                  &amp;&amp; numRetries.get() &lt; getConf().getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER,<a name="line.400"></a>
+<span class="sourceLineNo">401</span>                    HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER)) {<a name="line.401"></a>
+<span class="sourceLineNo">402</span>                LOG.warn("Will attempt to retry loading failed HFiles. Retry #"<a name="line.402"></a>
+<span class="sourceLineNo">403</span>                    + numRetries.incrementAndGet());<a name="line.403"></a>
+<span class="sourceLineNo">404</span>                // return lqi's to retry<a name="line.404"></a>
+<span class="sourceLineNo">405</span>                future.complete(lqis);<a name="line.405"></a>
+<span class="sourceLineNo">406</span>              } else {<a name="line.406"></a>
+<span class="sourceLineNo">407</span>                LOG.error(RETRY_ON_IO_EXCEPTION<a name="line.407"></a>
+<span class="sourceLineNo">408</span>                    + " is disabled or we have reached retry limit. Unable to recover");<a name="line.408"></a>
+<span class="sourceLineNo">409</span>                future.completeExceptionally(error);<a name="line.409"></a>
+<span class="sourceLineNo">410</span>              }<a name="line.410"></a>
+<span class="sourceLineNo">411</span>            } else {<a name="line.411"></a>
+<span class="sourceLineNo">412</span>              if (loaded) {<a name="line.412"></a>
+<span class="sourceLineNo">413</span>                future.complete(Collections.emptyList());<a name="line.413"></a>
+<span class="sourceLineNo">414</span>              } else {<a name="line.414"></a>
+<span class="sourceLineNo">415</span>                LOG.warn("Attempt to bulk load region containing " + Bytes.toStringBinary(first)<a name="line.415"></a>
+<span class="sourceLineNo">416</span>                    + " into table " + tableName + " with files " + lqis<a name="line.416"></a>
+<span class="sourceLineNo">417</span>                    + " failed.  This is recoverable and they will be retried.");<a name="line.417"></a>
+<span class="sourceLineNo">418</span>                // return lqi's to retry<a name="line.418"></a>
+<span class="sourceLineNo">419</span>                future.complete(lqis);<a name="line.419"></a>
+<span class="sourceLineNo">420</span>              }<a name="line.420"></a>
+<span class="sourceLineNo">421</span>            }<a name="line.421"></a>
+<span class="sourceLineNo">422</span>          });<a name="line.422"></a>
+<span class="sourceLineNo">423</span>    return future;<a name="line.423"></a>
+<span class="sourceLineNo">424</span>  }<a name="line.424"></a>
+<span class="sourceLineNo">425</span><a name="line.425"></a>
+<span class="sourceLineNo">426</span>  /**<a name="line.426"></a>
+<span class="sourceLineNo">427</span>   * This takes the LQI's grouped by likely regions and attempts to bulk load them. Any failures are<a name="line.427"></a>
+<span class="sourceLineNo">428</span>   * re-queued for another pass with the groupOrSplitPhase.<a name="line.428"></a>
+<span class="sourceLineNo">429</span>   * &lt;p/&gt;<a name="line.429"></a>
+<span class="sourceLineNo">430</span>   * protected for testing.<a name="line.430"></a>
+<span class="sourceLineNo">431</span>   */<a name="line.431"></a>
+<span class="sourceLineNo">432</span>  @VisibleForTesting<a name="line.432"></a>
+<span class="sourceLineNo">433</span>  protected void bulkLoadPhase(AsyncClusterConnection conn, TableName tableName,<a name="line.433"></a>
+<span class="sourceLineNo">434</span>      Deque&lt;LoadQueueItem&gt; queue, Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups,<a name="line.434"></a>
+<span class="sourceLineNo">435</span>      boolean copyFiles, Map&lt;LoadQueueItem, ByteBuffer&gt; item2RegionMap) throws IOException {<a name="line.435"></a>
+<span class="sourceLineNo">436</span>    // atomically bulk load the groups.<a name="line.436"></a>
+<span class="sourceLineNo">437</span>    List&lt;Future&lt;Collection&lt;LoadQueueItem&gt;&gt;&gt; loadingFutures = new ArrayList&lt;&gt;();<a name="line.437"></a>
+<span class="sourceLineNo">438</span>    for (Entry&lt;ByteBuffer, ? extends Collection&lt;LoadQueueItem&gt;&gt; entry : regionGroups.asMap()<a name="line.438"></a>
+<span class="sourceLineNo">439</span>        .entrySet()) {<a name="line.439"></a>
+<span class="sourceLineNo">440</span>      byte[] first = entry.getKey().array();<a name="line.440"></a>
+<span class="sourceLineNo">441</span>      final Collection&lt;LoadQueueItem&gt; lqis = entry.getValue();<a name="line.441"></a>
+<span class="sourceLineNo">442</span>      if (bulkLoadByFamily) {<a name="line.442"></a>
+<span class="sourceLineNo">443</span>        groupByFamilies(lqis).values().forEach(familyQueue -&gt; loadingFutures<a name="line.443"></a>
+<span class="sourceLineNo">444</span>            .add(tryAtomicRegionLoad(conn, tableName, copyFiles, first, familyQueue)));<a name="line.444"></a>
+<span class="sourceLineNo">445</span>      } else {<a name="line.445"></a>
+<span class="sourceLineNo">446</span>        loadingFutures.add(tryAtomicRegionLoad(conn, tableName, copyFiles, first, lqis));<a name="line.446"></a>
+<span class="sourceLineNo">447</span>      }<a name="line.447"></a>
+<span class="sourceLineNo">448</span>      if (item2RegionMap != null) {<a name="line.448"></a>
+<span class="sourceLineNo">449</span>        for (LoadQueueItem lqi : lqis) {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>          item2RegionMap.put(lqi, entry.getKey());<a name="line.450"></a>
+<span class="sourceLineNo">451</span>        }<a name="line.451"></a>
+<span class="sourceLineNo">452</span>      }<a name="line.452"></a>
+<span class="sourceLineNo">453</span>    }<a name="line.453"></a>
+<span class="sourceLineNo">454</span><a name="line.454"></a>
+<span class="sourceLineNo">455</span>    // get all the results.<a name="line.455"></a>
+<span class="sourceLineNo">456</span>    for (Future&lt;Collection&lt;LoadQueueItem&gt;&gt; future : loadingFutures) {<a name="line.456"></a>
+<span class="sourceLineNo">457</span>      try {<a name="line.457"></a>
+<span class="sourceLineNo">458</span>        Collection&lt;LoadQueueItem&gt; toRetry = future.get();<a name="line.458"></a>
+<span class="sourceLineNo">459</span><a name="line.459"></a>
+<span class="sourceLineNo">460</span>        if (item2RegionMap != null) {<a name="line.460"></a>
+<span class="sourceLineNo">461</span>          for (LoadQueueItem lqi : toRetry) {<a name="line.461"></a>
+<span class="sourceLineNo">462</span>            item2RegionMap.remove(lqi);<a name="line.462"></a>
+<span class="sourceLineNo">463</span>          }<a name="line.463"></a>
+<span class="sourceLineNo">464</span>        }<a name="line.464"></a>
+<span class="sourceLineNo">465</span>        // LQIs that are requeued to be regrouped.<a name="line.465"></a>
+<span class="sourceLineNo">466</span>        queue.addAll(toRetry);<a name="line.466"></a>
+<span class="sourceLineNo">467</span>      } catch (ExecutionException e1) {<a name="line.467"></a>
+<span class="sourceLineNo">468</span>        Throwable t = e1.getCause();<a name="line.468"></a>
+<span class="sourceLineNo">469</span>        if (t instanceof IOException) {<a name="line.469"></a>
+<span class="sourceLineNo">470</span>          // At this point something unrecoverable has happened.<a name="line.470"></a>
+<span class="sourceLineNo">471</span>          // TODO Implement bulk load recovery<a name="line.471"></a>
+<span class="sourceLineNo">472</span>          throw new IOException("BulkLoad encountered an unrecoverable problem", t);<a name="line.472"></a>
+<span class="sourceLineNo">473</span>        }<a name="line.473"></a>
+<span class="sourceLineNo">474</span>        LOG.error("Unexpected execution exception during bulk load", e1);<a name="line.474"></a>
+<span class="sourceLineNo">475</span>        throw new IllegalStateException(t);<a name="line.475"></a>
+<span class="sourceLineNo">476</span>      } catch (InterruptedException e1) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>        LOG.error("Unexpected interrupted exception during bulk load", e1);<a name="line.477"></a>
+<span class="sourceLineNo">478</span>        throw (InterruptedIOException) new InterruptedIOException().initCause(e1);<a name="line.478"></a>
+<span class="sourceLineNo">479</span>      }<a name="line.479"></a>
+<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
+<span class="sourceLineNo">481</span>  }<a name="line.481"></a>
+<span class="sourceLineNo">482</span><a name="line.482"></a>
+<span class="sourceLineNo">483</span>  private Map&lt;byte[], Collection&lt;LoadQueueItem&gt;&gt;<a name="line.483"></a>
+<span class="sourceLineNo">484</span>      groupByFamilies(Collection&lt;LoadQueueItem&gt; itemsInRegion) {<a name="line.484"></a>
+<span class="sourceLineNo">485</span>    Map&lt;byte[], Collection&lt;LoadQueueItem&gt;&gt; families2Queue = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.485"></a>
+<span class="sourceLineNo">486</span>    itemsInRegion.forEach(item -&gt; families2Queue<a name="line.486"></a>
+<span class="sourceLineNo">487</span>        .computeIfAbsent(item.getFamily(), queue -&gt; new ArrayList&lt;&gt;()).add(item));<a name="line.487"></a>
+<span class="sourceLineNo">488</span>    return families2Queue;<a name="line.488"></a>
+<span class="sourceLineNo">489</span>  }<a name="line.489"></a>
+<span class="sourceLineNo">490</span><a name="line.490"></a>
+<span class="sourceLineNo">491</span>  private boolean checkHFilesCountPerRegionPerFamily(<a name="line.491"></a>
+<span class="sourceLineNo">492</span>      final Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups) {<a name="line.492"></a>
+<span class="sourceLineNo">493</span>    for (Map.Entry&lt;ByteBuffer, Collection&lt;LoadQueueItem&gt;&gt; e : regionGroups.asMap().entrySet()) {<a name="line.493"></a>
+<span class="sourceLineNo">494</span>      Map&lt;byte[], MutableInt&gt; filesMap = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.494"></a>
+<span class="sourceLineNo">495</span>      for (LoadQueueItem lqi : e.getValue()) {<a name="line.495"></a>
+<span class="sourceLineNo">496</span>        MutableInt count = filesMap.computeIfAbsent(lqi.getFamily(), k -&gt; new MutableInt());<a name="line.496"></a>
+<span class="sourceLineNo">497</span>        count.increment();<a name="line.497"></a>
+<span class="sourceLineNo">498</span>        if (count.intValue() &gt; maxFilesPerRegionPerFamily) {<a name="line.498"></a>
+<span class="sourceLineNo">499</span>          LOG.error("Trying to load more than " + maxFilesPerRegionPerFamily +<a name="line.499"></a>
+<span class="sourceLineNo">500</span>            " hfiles to family " + Bytes.toStringBinary(lqi.getFamily()) +<a name="line.500"></a>
+<span class="sourceLineNo">501</span>            " of region with start key " + Bytes.toStringBinary(e.getKey()));<a name="line.501"></a>
+<span class="sourceLineNo">502</span>          return false;<a name="line.502"></a>
+<span class="sourceLineNo">503</span>        }<a name="line.503"></a>
+<span class="sourceLineNo">504</span>      }<a name="line.504"></a>
+<span class="sourceLineNo">505</span>    }<a name="line.505"></a>
+<span class="sourceLineNo">506</span>    return true;<a name="line.506"></a>
+<span class="sourceLineNo">507</span>  }<a name="line.507"></a>
+<span class="sourceLineNo">508</span><a name="line.508"></a>
+<span class="sourceLineNo">509</span>  /**<a name="line.509"></a>
+<span class="sourceLineNo">510</span>   * @param conn the HBase cluster connection<a name="line.510"></a>
+<span class="sourceLineNo">511</span>   * @param tableName the table name of the table to load into<a name="line.511"></a>
+<span class="sourceLineNo">512</span>   * @param pool the ExecutorService<a name="line.512"></a>
+<span class="sourceLineNo">513</span>   * @param queue the queue for LoadQueueItem<a name="line.513"></a>
+<span class="sourceLineNo">514</span>   * @param startEndKeys start and end keys<a name="line.514"></a>
+<span class="sourceLineNo">515</span>   * @return A map that groups LQI by likely bulk load region targets and Set of missing hfiles.<a name="line.515"></a>
+<span class="sourceLineNo">516</span>   */<a name="line.516"></a>
+<span class="sourceLineNo">517</span>  private Pair&lt;Multimap&lt;ByteBuffer, LoadQueueItem&gt;, Set&lt;String&gt;&gt; groupOrSplitPhase(<a name="line.517"></a>
+<span class="sourceLineNo">518</span>      AsyncClusterConnection conn, TableName tableName, ExecutorService pool,<a name="line.518"></a>
+<span class="sourceLineNo">519</span>      Deque&lt;LoadQueueItem&gt; queue, List&lt;Pair&lt;byte[], byte[]&gt;&gt; startEndKeys) throws IOException {<a name="line.519"></a>
+<span class="sourceLineNo">520</span>    // &lt;region start key, LQI&gt; need synchronized only within this scope of this<a name="line.520"></a>
+<span class="sourceLineNo">521</span>    // phase because of the puts that happen in futures.<a name="line.521"></a>
+<span class="sourceLineNo">522</span>    Multimap&lt;ByteBuffer, LoadQueueItem&gt; rgs = HashMultimap.create();<a name="line.522"></a>
+<span class="sourceLineNo">523</span>    final Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups = Multimaps.synchronizedMultimap(rgs);<a name="line.523"></a>
+<span class="sourceLineNo">524</span>    Set&lt;String&gt; missingHFiles = new HashSet&lt;&gt;();<a name="line.524"></a>
+<span class="sourceLineNo">525</span>    Pair&lt;Multimap&lt;ByteBuffer, LoadQueueItem&gt;, Set&lt;String&gt;&gt; pair =<a name="line.525"></a>
+<span class="sourceLineNo">526</span>      new Pair&lt;&gt;(regionGroups, missingHFiles);<a name="line.526"></a>
+<span class="sourceLineNo">527</span><a name="line.527"></a>
+<span class="sourceLineNo">528</span>    // drain LQIs and figure out bulk load groups<a name="line.528"></a>
+<span class="sourceLineNo">529</span>    Set&lt;Future&lt;Pair&lt;List&lt;LoadQueueItem&gt;, String&gt;&gt;&gt; splittingFutures = new HashSet&lt;&gt;();<a name="line.529"></a>
+<span class="sourceLineNo">530</span>    while (!queue.isEmpty()) {<a name="line.530"></a>
+<span class="sourceLineNo">531</span>      final LoadQueueItem item = queue.remove();<a name="line.531"></a>
+<span class="sourceLineNo">532</span><a name="line.532"></a>
+<span class="sourceLineNo">533</span>      final Callable&lt;Pair&lt;List&lt;LoadQueueItem&gt;, String&gt;&gt; call =<a name="line.533"></a>
+<span class="sourceLineNo">534</span>        () -&gt; groupOrSplit(conn, tableName, regionGroups, item, startEndKeys);<a name="line.534"></a>
+<span class="sourceLineNo">535</span>      splittingFutures.add(pool.submit(call));<a name="line.535"></a>
+<span class="sourceLineNo">536</span>    }<a name="line.536"></a>
+<span class="sourceLineNo">537</span>    // get all the results. All grouping and splitting must finish before<a name="line.537"></a>
+<span class="sourceLineNo">538</span>    // we can attempt the atomic loads.<a name="line.538"></a>
+<span class="sourceLineNo">539</span>    for (Future&lt;Pair&lt;List&lt;LoadQueueItem&gt;, String&gt;&gt; lqis : splittingFutures) {<a name="line.539"></a>
+<span class="sourceLineNo">540</span>      try {<a name="line.540"></a>
+<span class="sourceLineNo">541</span>        Pair&lt;List&lt;LoadQueueItem&gt;, String&gt; splits = lqis.get();<a name="line.541"></a>
+<span class="sourceLineNo">542</span>        if (splits != null) {<a name="line.542"></a>
+<span class="sourceLineNo">543</span>          if (splits.getFirst() != null) {<a name="line.543"></a>
+<span class="sourceLineNo">544</span>            queue.addAll(splits.getFirst());<a name="line.544"></a>
+<span class="sourceLineNo">545</span>          } else {<a name="line.545"></a>
+<span class="sourceLineNo">546</span>            missingHFiles.add(splits.getSecond());<a name="line.546"></a>
+<span class="sourceLineNo">547</span>          }<a name="line.547"></a>
+<span class="sourceLineNo">548</span>        }<a name="line.548"></a>
+<span class="sourceLineNo">549</span>      } catch (ExecutionException e1) {<a name="line.549"></a>
+<span class="sourceLineNo">550</span>        Throwable t = e1.getCause();<a name="line.550"></a>
+<span class="sourceLineNo">551</span>        if (t instanceof IOException) {<a name="line.551"></a>
+<span class="sourceLineNo">552</span>          LOG.error("IOException during splitting", e1);<a name="line.552"></a>
+<span class="sourceLineNo">553</span>          throw (IOException) t; // would have been thrown if not parallelized,<a name="line.553"></a>
 <span class="sourceLineNo">554</span>        }<a name="line.554"></a>
-<span class="sourceLineNo">555</span>      } catch (ExecutionException e1) {<a name="line.555"></a>
-<span class="sourceLineNo">556</span>        Throwable t = e1.getCause();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>        if (t instanceof IOException) {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>          LOG.error("IOException during splitting", e1);<a name="line.558"></a>
-<span class="sourceLineNo">559</span>          throw (IOException) t; // would have been thrown if not parallelized,<a name="line.559"></a>
-<span class="sourceLineNo">560</span>        }<a name="line.560"></a>
-<span class="sourceLineNo">561</span>        LOG.error("Unexpected execution exception during splitting", e1);<a name="line.561"></a>
-<span class="sourceLineNo">562</span>        throw new IllegalStateException(t);<a name="line.562"></a>
-<span class="sourceLineNo">563</span>      } catch (InterruptedException e1) {<a name="line.563"></a>
-<span class="sourceLineNo">564</span>        LOG.error("Unexpected interrupted exception during splitting", e1);<a name="line.564"></a>
-<span class="sourceLineNo">565</span>        throw (InterruptedIOException) new InterruptedIOException().initCause(e1);<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      }<a name="line.566"></a>
-<span class="sourceLineNo">567</span>    }<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    return pair;<a name="line.568"></a>
-<span class="sourceLineNo">569</span>  }<a name="line.569"></a>
-<span class="sourceLineNo">570</span><a name="line.570"></a>
-<span class="sourceLineNo">571</span>  // unique file name for the table<a name="line.571"></a>
-<span class="sourceLineNo">572</span>  private String getUniqueName() {<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    return UUID.randomUUID().toString().replaceAll("-", "");<a name="line.573"></a>
-<span class="sourceLineNo">574</span>  }<a name="line.574"></a>
-<span class="sourceLineNo">575</span><a name="line.575"></a>
-<span class="sourceLineNo">576</span>  private List&lt;LoadQueueItem&gt; splitStoreFile(LoadQueueItem item, TableDescriptor tableDesc,<a name="line.576"></a>
-<span class="sourceLineNo">577</span>      byte[] splitKey) throws IOException {<a name="line.577"></a>
-<span class="sourceLineNo">578</span>    Path hfilePath = item.getFilePath();<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    byte[] family = item.getFamily();<a name="line.579"></a>
-<span class="sourceLineNo">580</span>    Path tmpDir = hfilePath.getParent();<a name="line.580"></a>
-<span class="sourceLineNo">581</span>    if (!tmpDir.getName().equals(TMP_DIR)) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>      tmpDir = new Path(tmpDir, TMP_DIR);<a name="line.582"></a>
-<span class="sourceLineNo">583</span>    }<a name="line.583"></a>
-<span class="sourceLineNo">584</span><a name="line.584"></a>
-<span class="sourceLineNo">585</span>    LOG.info("HFile at " + hfilePath + " no longer fits inside a single " + "region. Splitting...");<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>    String uniqueName = getUniqueName();<a name="line.587"></a>
-<span class="sourceLineNo">588</span>    ColumnFamilyDescriptor familyDesc = tableDesc.getColumnFamily(family);<a name="line.588"></a>
-<span class="sourceLineNo">589</span><a name="line.589"></a>
-<span class="sourceLineNo">590</span>    Path botOut = new Path(tmpDir, uniqueName + ".bottom");<a name="line.590"></a>
-<span class="sourceLineNo">591</span>    Path topOut = new Path(tmpDir, uniqueName + ".top");<a name="line.591"></a>
-<span class="sourceLineNo">592</span>    splitStoreFile(getConf(), hfilePath, familyDesc, splitKey, botOut, topOut);<a name="line.592"></a>
-<span class="sourceLineNo">593</span><a name="line.593"></a>
-<span class="sourceLineNo">594</span>    FileSystem fs = tmpDir.getFileSystem(getConf());<a name="line.594"></a>
-<span class="sourceLineNo">595</span>    fs.setPermission(tmpDir, FsPermission.valueOf("-rwxrwxrwx"));<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    fs.setPermission(botOut, FsPermission.valueOf("-rwxrwxrwx"));<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    fs.setPermission(topOut, FsPermission.valueOf("-rwxrwxrwx"));<a name="line.597"></a>
+<span class="sourceLineNo">555</span>        LOG.error("Unexpected execution exception during splitting", e1);<a name="line.555"></a>
+<span class="sourceLineNo">556</span>        throw new IllegalStateException(t);<a name="line.556"></a>
+<span class="sourceLineNo">557</span>      } catch (InterruptedException e1) {<a name="line.557"></a>
+<span class="sourceLineNo">558</span>        LOG.error("Unexpected interrupted exception during splitting", e1);<a name="line.558"></a>
+<span class="sourceLineNo">559</span>        throw (InterruptedIOException) new InterruptedIOException().initCause(e1);<a name="line.559"></a>
+<span class="sourceLineNo">560</span>      }<a name="line.560"></a>
+<span class="sourceLineNo">561</span>    }<a name="line.561"></a>
+<span class="sourceLineNo">562</span>    return pair;<a name="line.562"></a>
+<span class="sourceLineNo">563</span>  }<a name="line.563"></a>
+<span class="sourceLineNo">564</span><a name="line.564"></a>
+<span class="sourceLineNo">565</span>  // unique file name for the table<a name="line.565"></a>
+<span class="sourceLineNo">566</span>  private String getUniqueName() {<a name="line.566"></a>
+<span class="sourceLineNo">567</span>    return UUID.randomUUID().toString().replaceAll("-", "");<a name="line.567"></a>
+<span class="sourceLineNo">568</span>  }<a name="line.568"></a>
+<span class="sourceLineNo">569</span><a name="line.569"></a>
+<span class="sourceLineNo">570</span>  private List&lt;LoadQueueItem&gt; splitStoreFile(LoadQueueItem item, TableDescriptor tableDesc,<a name="line.570"></a>
+<span class="sourceLineNo">571</span>      byte[] splitKey) throws IOException {<a name="line.571"></a>
+<span class="sourceLineNo">572</span>    Path hfilePath = item.getFilePath();<a name="line.572"></a>
+<span class="sourceLineNo">573</span>    byte[] family = item.getFamily();<a name="line.573"></a>
+<span class="sourceLineNo">574</span>    Path tmpDir = hfilePath.getParent();<a name="line.574"></a>
+<span class="sourceLineNo">575</span>    if (!tmpDir.getName().equals(TMP_DIR)) {<a name="line.575"></a>
+<span class="sourceLineNo">576</span>      tmpDir = new Path(tmpDir, TMP_DIR);<a name="line.576"></a>
+<span class="sourceLineNo">577</span>    }<a name="line.577"></a>
+<span class="sourceLineNo">578</span><a name="line.578"></a>
+<span class="sourceLineNo">579</span>    LOG.info("HFile at " + hfilePath + " no longer fits inside a single " + "region. Splitting...");<a name="line.579"></a>
+<span class="sourceLineNo">580</span><a name="line.580"></a>
+<span class="sourceLineNo">581</span>    String uniqueName = getUniqueName();<a name="line.581"></a>
+<span class="sourceLineNo">582</span>    ColumnFamilyDescriptor familyDesc = tableDesc.getColumnFamily(family);<a name="line.582"></a>
+<span class="sourceLineNo">583</span><a name="line.583"></a>
+<span class="sourceLineNo">584</span>    Path botOut = new Path(tmpDir, uniqueName + ".bottom");<a name="line.584"></a>
+<span class="sourceLineNo">585</span>    Path topOut = new Path(tmpDir, uniqueName + ".top");<a name="line.585"></a>
+<span class="sourceLineNo">586</span>    splitStoreFile(getConf(), hfilePath, familyDesc, splitKey, botOut, topOut);<a name="line.586"></a>
+<span class="sourceLineNo">587</span><a name="line.587"></a>
+<span class="sourceLineNo">588</span>    FileSystem fs = tmpDir.getFileSystem(getConf());<a name="line.588"></a>
+<span class="sourceLineNo">589</span>    fs.setPermission(tmpDir, FsPermission.valueOf("-rwxrwxrwx"));<a name="line.589"></a>
+<span class="sourceLineNo">590</span>    fs.setPermission(botOut, FsPermission.valueOf("-rwxrwxrwx"));<a name="line.590"></a>
+<span class="sourceLineNo">591</span>    fs.setPermission(topOut, FsPermission.valueOf("-rwxrwxrwx"));<a name="line.591"></a>
+<span class="sourceLineNo">592</span><a name="line.592"></a>
+<span class="sourceLineNo">593</span>    // Add these back at the *front* of the queue, so there's a lower<a name="line.593"></a>
+<span class="sourceLineNo">594</span>    // chance that the region will just split again before we get there.<a name="line.594"></a>
+<span class="sourceLineNo">595</span>    List&lt;LoadQueueItem&gt; lqis = new ArrayList&lt;&gt;(2);<a name="line.595"></a>
+<span class="sourceLineNo">596</span>    lqis.add(new LoadQueueItem(family, botOut));<a name="line.596"></a>
+<span class="sourceLineNo">597</span>    lqis.add(new LoadQueueItem(family, topOut));<a name="line.597"></a>
 <span class="sourceLineNo">598</span><a name="line.598"></a>
-<span class="sourceLineNo">599</span>    // Add these back at the *front* of the queue, so there's a lower<a name="line.599"></a>
-<span class="sourceLineNo">600</span>    // chance that the region will just split again before we get there.<a name="line.600"></a>
-<span class="sourceLineNo">601</span>    List&lt;LoadQueueItem&gt; lqis = new ArrayList&lt;&gt;(2);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>    lqis.add(new LoadQueueItem(family, botOut));<a name="line.602"></a>
-<span class="sourceLineNo">603</span>    lqis.add(new LoadQueueItem(family, topOut));<a name="line.603"></a>
-<span class="sourceLineNo">604</span><a name="line.604"></a>
-<span class="sourceLineNo">605</span>    // If the current item is already the result of previous splits,<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    // we don't need it anymore. Clean up to save space.<a name="line.606"></a>
-<span class="sourceLineNo">607</span>    // It is not part of the original input files.<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    try {<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      if (tmpDir.getName().equals(TMP_DIR)) {<a name="line.609"></a>
-<span class="sourceLineNo">610</span>        fs.delete(hfilePath, false);<a name="line.610"></a>
-<span class="sourceLineNo">611</span>      }<a name="line.611"></a>
-<span class="sourceLineNo">612</span>    } catch (IOException e) {<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      LOG.warn("Unable to delete temporary split file " + hfilePath);<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    }<a name="line.614"></a>
-<span class="sourceLineNo">615</span>    LOG.info("Successfully split into new HFiles " + botOut + " and " + topOut);<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    return lqis;<a name="line.616"></a>
-<span class="sourceLineNo">617</span>  }<a name="line.617"></a>
-<span class="sourceLineNo">618</span><a name="line.618"></a>
-<span class="sourceLineNo">619</span>  /**<a name="line.619"></a>
-<span class="sourceLineNo">620</span>   * Attempt to assign the given load queue item into its target region group. If the hfile boundary<a name="line.620"></a>
-<span class="sourceLineNo">621</span>   * no longer fits into a region, physically splits the hfile such that the new bottom half will<a name="line.621"></a>
-<span class="sourceLineNo">622</span>   * fit and returns the list of LQI's corresponding to the resultant hfiles.<a name="line.622"></a>
-<span class="sourceLineNo">623</span>   * &lt;p/&gt;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>   * protected for testing<a name="line.624"></a>
-<span class="sourceLineNo">625</span>   * @throws IOException if an IO failure is encountered<a name="line.625"></a>
-<span class="sourceLineNo">626</span>   */<a name="line.626"></a>
-<span class="sourceLineNo">627</span>  @VisibleForTesting<a name="line.627"></a>
-<span class="sourceLineNo">628</span>  protected Pair&lt;List&lt;LoadQueueItem&gt;, String&gt; groupOrSplit(AsyncClusterConnection conn,<a name="line.628"></a>
-<span class="sourceLineNo">629</span>      TableName tableName, Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups, LoadQueueItem item,<a name="line.629"></a>
-<span class="sourceLineNo">630</span>      List&lt;Pair&lt;byte[], byte[]&gt;&gt; startEndKeys) throws IOException {<a name="line.630"></a>
-<span class="sourceLineNo">631</span>    Path hfilePath = item.getFilePath();<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    Optional&lt;byte[]&gt; first, last;<a name="line.632"></a>
-<span class="sourceLineNo">633</span>    try (HFile.Reader hfr = HFile.createReader(hfilePath.getFileSystem(getConf()), hfilePath,<a name="line.633"></a>
-<span class="sourceLineNo">634</span>      CacheConfig.DISABLED, true, getConf())) {<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      first = hfr.getFirstRowKey();<a name="line.635"></a>
-<span class="sourceLineNo">636</span>      last = hfr.getLastRowKey();<a name="line.636"></a>
-<span class="sourceLineNo">637</span>    } catch (FileNotFoundException fnfe) {<a name="line.637"></a>
-<span class="sourceLineNo">638</span>      LOG.debug("encountered", fnfe);<a name="line.638"></a>
-<span class="sourceLineNo">639</span>      return new Pair&lt;&gt;(null, hfilePath.getName());<a name="line.639"></a>
-<span class="sourceLineNo">640</span>    }<a name="line.640"></a>
-<span class="sourceLineNo">641</span><a name="line.641"></a>
-<span class="sourceLineNo">642</span>    LOG.info("Trying to load hfile=" + hfilePath + " first=" + first.map(Bytes::toStringBinary) +<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      " last=" + last.map(Bytes::toStringBinary));<a name="line.643"></a>
-<span class="sourceLineNo">644</span>    if (!first.isPresent() || !last.isPresent()) {<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      assert !first.isPresent() &amp;&amp; !last.isPresent();<a name="line.645"></a>
-<span class="sourceLineNo">646</span>      // TODO what if this is due to a bad HFile?<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      LOG.info("hfile " + hfilePath + " has no entries, skipping");<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      return null;<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    }<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    if (Bytes.compareTo(first.get(), last.get()) &gt; 0) {<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      throw new IllegalArgumentException("Invalid range: " + Bytes.toStringBinary(first.get()) +<a name="line.651"></a>
-<span class="sourceLineNo">652</span>        " &gt; " + Bytes.toStringBinary(last.get()));<a name="line.652"></a>
-<span class="sourceLineNo">653</span>    }<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    int idx =<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      Collections.binarySearch(startEndKeys, Pair.newPair(first.get(), HConstants.EMPTY_END_ROW),<a name="line.655"></a>
-<span class="sourceLineNo">656</span>        (p1, p2) -&gt; Bytes.compareTo(p1.getFirst(), p2.getFirst()));<a name="line.656"></a>
-<span class="sourceLineNo">657</span>    if (idx &lt; 0) {<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      // not on boundary, returns -(insertion index). Calculate region it<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      // would be in.<a name="line.659"></a>
-<span class="sourceLineNo">660</span>      idx = -(idx + 1) - 1;<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    }<a name="line.661"></a>
-<span class="sourceLineNo">662</span>    int indexForCallable = idx;<a name="line.662"></a>
-<span class="sourceLineNo">663</span><a name="line.663"></a>
-<span class="sourceLineNo">664</span>    /*<a name="line.664"></a>
-<span class="sourceLineNo">665</span>     * we can consider there is a region hole in following conditions. 1) if idx &lt; 0,then first<a name="line.665"></a>
-<span class="sourceLineNo">666</span>     * region info is lost. 2) if the endkey of a region is not equal to the startkey of the next<a name="line.666"></a>
-<span class="sourceLineNo">667</span>     * region. 3) if the endkey of the last region is not empty.<a name="line.667"></a>
-<span class="sourceLineNo">668</span>     */<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    if (indexForCallable &lt; 0) {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      throw new IOException("The first region info for table " + tableName +<a name="line.670"></a>
-<span class="sourceLineNo">671</span>        " can't be found in hbase:meta.Please use hbck tool to fix it first.");<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    } else if ((indexForCallable == startEndKeys.size() - 1) &amp;&amp;<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      !Bytes.equals(startEndKeys.get(indexForCallable).getSecond(), HConstants.EMPTY_BYTE_ARRAY)) {<a name="line.673"></a>
-<span class="sourceLineNo">674</span>      throw new IOException("The last region info for table " + tableName +<a name="line.674"></a>
-<span class="sourceLineNo">675</span>        " can't be found in hbase:meta.Please use hbck tool to fix it first.");<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    } else if (indexForCallable + 1 &lt; startEndKeys.size() &amp;&amp;<a name="line.676"></a>
-<span class="sourceLineNo">677</span>      !(Bytes.compareTo(startEndKeys.get(indexForCallable).getSecond(),<a name="line.677"></a>
-<span class="sourceLineNo">678</span>        startEndKeys.get(indexForCallable + 1).getFirst()) == 0)) {<a name="line.678"></a>
-<span class="sourceLineNo">679</span>      throw new IOException("The endkey of one region for table " + tableName +<a name="line.679"></a>
-<span class="sourceLineNo">680</span>        " is not equal to the startkey of the next region in hbase:meta." +<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        "Please use hbck tool to fix it first.");<a name="line.681"></a>
-<span class="sourceLineNo">682</span>    }<a name="line.682"></a>
-<span class="sourceLineNo">683</span><a name="line.683"></a>
-<span class="sourceLineNo">684</span>    boolean lastKeyInRange = Bytes.compareTo(last.get(), startEndKeys.get(idx).getSecond()) &lt; 0 ||<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      Bytes.equals(startEndKeys.get(idx).getSecond(), HConstants.EMPTY_BYTE_ARRAY);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    if (!lastKeyInRange) {<a name="line.686"></a>
-<span class="sourceLineNo">687</span>      Pair&lt;byte[], byte[]&gt; startEndKey = startEndKeys.get(indexForCallable);<a name="line.687"></a>
-<span class="sourceLineNo">688</span>      List&lt;LoadQueueItem&gt; lqis =<a name="line.688"></a>
-<span class="sourceLineNo">689</span>        splitStoreFile(item, FutureUtils.get(conn.getAdmin().getDescriptor(tableName)),<a name="line.689"></a>
-<span class="sourceLineNo">690</span>            startEndKey.getSecond());<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      return new Pair&lt;&gt;(lqis, null);<a name="line.691"></a>
-<span class="sourceLineNo">692</span>    }<a name="line.692"></a>
-<span class="sourceLineNo">693</span><a name="line.693"></a>
-<span class="sourceLineNo">694</span>    // group regions.<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    regionGroups.put(ByteBuffer.wrap(startEndKeys.get(idx).getFirst()), item);<a name="line.695"></a>
-<span class="sourceLineNo">696</span>    return null;<a name="line.696"></a>
-<span class="sourceLineNo">697</span>  }<a name="line.697"></a>
-<span class="sourceLineNo">698</span><a name="line.698"></a>
-<span class="sourceLineNo">699</span>  /**<a name="line.699"></a>
-<span class="sourceLineNo">700</span>   * Split a storefile into a top and bottom half, maintaining the metadata, recreating bloom<a name="line.700"></a>
-<span class="sourceLineNo">701</span>   * filters, etc.<a name="line.701"></a>
-<span class="sourceLineNo">702</span>   */<a name="line.702"></a>
-<span class="sourceLineNo">703</span>  @VisibleForTesting<a name="line.703"></a>
-<span class="sourceLineNo">704</span>  static void splitStoreFile(Configuration conf, Path inFile, ColumnFamilyDescriptor familyDesc,<a name="line.704"></a>
-<span class="sourceLineNo">705</span>      byte[] splitKey, Path bottomOut, Path topOut) throws IOException {<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    // Open reader with no block cache, and not in-memory<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    Reference topReference = Reference.createTopReference(splitKey);<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    Reference bottomReference = Reference.createBottomReference(splitKey);<a name="line.708"></a>
-<span class="sourceLineNo">709</span><a name="line.709"></a>
-<span class="sourceLineNo">710</span>    copyHFileHalf(conf, inFile, topOut, topReference, familyDesc);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    copyHFileHalf(conf, inFile, bottomOut, bottomReference, familyDesc);<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  /**<a name="line.714"></a>
-<span class="sourceLineNo">715</span>   * Copy half of an HFile into a new HFile.<a name="line.715"></a>
-<span class="sourceLineNo">716</span>   */<a name="line.716"></a>
-<span class="sourceLineNo">717</span>  private static void copyHFileHalf(Configuration conf, Path inFile, Path outFile,<a name="line.717"></a>
-<span class="sourceLineNo">718</span>      Reference reference, ColumnFamilyDescriptor familyDescriptor) throws IOException {<a name="line.718"></a>
-<span class="sourceLineNo">719</span>    FileSystem fs = inFile.getFileSystem(conf);<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    CacheConfig cacheConf = CacheConfig.DISABLED;<a name="line.720"></a>
-<span class="sourceLineNo">721</span>    HalfStoreFileReader halfReader = null;<a name="line.721"></a>
-<span class="sourceLineNo">722</span>    StoreFileWriter halfWriter = null;<a name="line.722"></a>
-<span class="sourceLineNo">723</span>    try {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>      ReaderContext context = new ReaderContextBuilder()<a name="line.724"></a>
-<span class="sourceLineNo">725</span>          .withFileSystemAndPath(fs, inFile).build();<a name="line.725"></a>
-<span class="sourceLineNo">726</span>      HFileInfo hfile = new HFileInfo(context, conf);<a name="line.726"></a>
-<span class="sourceLineNo">727</span>      halfReader = new HalfStoreFileReader(context, hfile, cacheConf, reference,<a name="line.727"></a>
-<span class="sourceLineNo">728</span>        new AtomicInteger(0), conf);<a name="line.728"></a>
-<span class="sourceLineNo">729</span>      hfile.initMetaAndIndex(halfReader.getHFileReader());<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      Map&lt;byte[], byte[]&gt; fileInfo = halfReader.loadFileInfo();<a name="line.730"></a>
-<span class="sourceLineNo">731</span><a name="line.731"></a>
-<span class="sourceLineNo">732</span>      int blocksize = familyDescriptor.getBlocksize();<a name="line.732"></a>
-<span class="sourceLineNo">733</span>      Algorithm compression = familyDescriptor.getCompressionType();<a name="line.733"></a>
-<span class="sourceLineNo">734</span>      BloomType bloomFilterType = familyDescriptor.getBloomFilterType();<a name="line.734"></a>
-<span class="sourceLineNo">735</span>      HFileContext hFileContext = new HFileContextBuilder().withCompression(compression)<a name="line.735"></a>
-<span class="sourceLineNo">736</span>        .withChecksumType(HStore.getChecksumType(conf))<a name="line.736"></a>
-<span class="sourceLineNo">737</span>        .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf)).withBlockSize(blocksize)<a name="line.737"></a>
-<span class="sourceLineNo">738</span>        .withDataBlockEncoding(familyDescriptor.getDataBlockEncoding()).withIncludesTags(true)<a name="line.738"></a>
-<span class="sourceLineNo">739</span>        .build();<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      halfWriter = new StoreFileWriter.Builder(conf, cacheConf, fs).withFilePath(outFile)<a name="line.740"></a>
-<span class="sourceLineNo">741</span>        .withBloomType(bloomFilterType).withFileContext(hFileContext).build();<a name="line.741"></a>
-<span class="sourceLineNo">742</span>      HFileScanner scanner = halfReader.getScanner(false, false, false);<a name="line.742"></a>
-<span class="sourceLineNo">743</span>      scanner.seekTo();<a name="line.743"></a>
-<span class="sourceLineNo">744</span>      do {<a name="line.744"></a>
-<span class="sourceLineNo">745</span>        halfWriter.append(scanner.getCell());<a name="line.745"></a>
-<span class="sourceLineNo">746</span>      } while (scanner.next());<a name="line.746"></a>
-<span class="sourceLineNo">747</span><a name="line.747"></a>
-<span class="sourceLineNo">748</span>      for (Map.Entry&lt;byte[], byte[]&gt; entry : fileInfo.entrySet()) {<a name="line.748"></a>
-<span class="sourceLineNo">749</span>        if (shouldCopyHFileMetaKey(entry.getKey())) {<a name="line.749"></a>
-<span class="sourceLineNo">750</span>          halfWriter.appendFileInfo(entry.getKey(), entry.getValue());<a name="line.750"></a>
-<span class="sourceLineNo">751</span>        }<a name="line.751"></a>
-<span class="sourceLineNo">752</span>      }<a name="line.752"></a>
-<span class="sourceLineNo">753</span>    } finally {<a name="line.753"></a>
-<span class="sourceLineNo">754</span>      if (halfReader != null) {<a name="line.754"></a>
-<span class="sourceLineNo">755</span>        try {<a name="line.755"></a>
-<span class="sourceLineNo">756</span>          halfReader.close(cacheConf.shouldEvictOnClose());<a name="line.756"></a>
-<span class="sourceLineNo">757</span>        } catch (IOException e) {<a name="line.757"></a>
-<span class="sourceLineNo">758</span>          LOG.warn("failed to close hfile reader for " + inFile, e);<a name="line.758"></a>
-<span class="sourceLineNo">759</span>        }<a name="line.759"></a>
-<span class="sourceLineNo">760</span>      }<a name="line.760"></a>
-<span class="sourceLineNo">761</span>      if (halfWriter != null) {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>        halfWriter.close();<a name="line.762"></a>
-<span class="sourceLineNo">763</span>      }<a name="line.763"></a>
-<span class="sourceLineNo">764</span>    }<a name="line.764"></a>
-<span class="sourceLineNo">765</span>  }<a name="line.765"></a>
-<span class="sourceLineNo">766</span><a name="line.766"></a>
-<span class="sourceLineNo">767</span>  /**<a name="line.767"></a>
-<span class="sourceLineNo">768</span>   * Infers region boundaries for a new table.<a name="line.768"></a>
-<span class="sourceLineNo">769</span>   * &lt;p/&gt;<a name="line.769"></a>
-<span class="sourceLineNo">770</span>   * Parameter: &lt;br/&gt;<a name="line.770"></a>
-<span class="sourceLineNo">771</span>   * bdryMap is a map between keys to an integer belonging to {+1, -1}<a name="line.771"></a>
-<span class="sourceLineNo">772</span>   * &lt;ul&gt;<a name="line.772"></a>
-<span class="sourceLineNo">773</span>   * &lt;li&gt;If a key is a start key of a file, then it maps to +1&lt;/li&gt;<a name="line.773"></a>
-<span class="sourceLineNo">774</span>   * &lt;li&gt;If a key is an end key of a file, then it maps to -1&lt;/li&gt;<a name="line.774"></a>
-<span class="sourceLineNo">775</span>   * &lt;/ul&gt;<a name="line.775"></a>
-<span class="sourceLineNo">776</span>   * &lt;p&gt;<a name="line.776"></a>
-<span class="sourceLineNo">777</span>   * Algo:&lt;br/&gt;<a name="line.777"></a>
-<span class="sourceLineNo">778</span>   * &lt;ol&gt;<a name="line.778"></a>
-<span class="sourceLineNo">779</span>   * &lt;li&gt;Poll on the keys in order:<a name="line.779"></a>
-<span class="sourceLineNo">780</span>   * &lt;ol type="a"&gt;<a name="line.780"></a>
-<span class="sourceLineNo">781</span>   * &lt;li&gt;Keep adding the mapped values to these keys (runningSum)&lt;/li&gt;<a name="line.781"></a>
-<span class="sourceLineNo">782</span>   * &lt;li&gt;Each time runningSum reaches 0, add the start Key from when the runningSum had started to a<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * boundary list.&lt;/li&gt;<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   * &lt;/ol&gt;<a name="line.784"></a>
-<span class="sourceLineNo">785</span>   * &lt;/li&gt;<a name="line.785"></a>
-<span class="sourceLineNo">786</span>   * &lt;li&gt;Return the boundary list.&lt;/li&gt;<a name="line.786"></a>
-<span class="sourceLineNo">787</span>   * &lt;/ol&gt;<a name="line.787"></a>
-<span class="sourceLineNo">788</span>   */<a name="line.788"></a>
-<span class="sourceLineNo">789</span>  public static byte[][] inferBoundaries(SortedMap&lt;byte[], Integer&gt; bdryMap) {<a name="line.789"></a>
-<span class="sourceLineNo">790</span>    List&lt;byte[]&gt; keysArray = new ArrayList&lt;&gt;();<a name="line.790"></a>
-<span class="sourceLineNo">791</span>    int runningValue = 0;<a name="line.791"></a>
-<span class="sourceLineNo">792</span>    byte[] currStartKey = null;<a name="line.792"></a>
-<span class="sourceLineNo">793</span>    boolean firstBoundary = true;<a name="line.793"></a>
-<span class="sourceLineNo">794</span><a name="line.794"></a>
-<span class="sourceLineNo">795</span>    for (Map.Entry&lt;byte[], Integer&gt; item : bdryMap.entrySet()) {<a name="line.795"></a>
-<span class="sourceLineNo">796</span>      if (runningValue == 0) {<a name="line.796"></a>
-<span class="sourceLineNo">797</span>        currStartKey = item.getKey();<a name="line.797"></a>
-<span class="sourceLineNo">798</span>      }<a name="line.798"></a>
-<span class="sourceLineNo">799</span>      runningValue += item.getValue();<a name="line.799"></a>
-<span class="sourceLineNo">800</span>      if (runningValue == 0) {<a name="line.800"></a>
-<span class="sourceLineNo">801</span>        if (!firstBoundary) {<a name="line.801"></a>
-<span class="sourceLineNo">802</span>          keysArray.add(currStartKey);<a name="line.802"></a>
-<span class="sourceLineNo">803</span>        }<a name="line.803"></a>
-<span class="sourceLineNo">804</span>        firstBoundary = false;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>      }<a name="line.805"></a>
-<span class="sourceLineNo">806</span>    }<a name="line.806"></a>
-<span class="sourceLineNo">807</span><a name="line.807"></a>
-<span class="sourceLineNo">808</span>    return keysArray.toArray(new byte[0][]);<a name="line.808"></a>
-<span class="sourceLineNo">809</span>  }<a name="line.809"></a>
-<span class="sourceLineNo">810</span><a name="line.810"></a>
-<span class="sourceLineNo">811</span>  /**<a name="line.811"></a>
-<span class="sourceLineNo">812</span>   * If the table is created for the first time, then "completebulkload" reads the files twice. More<a name="line.812"></a>
-<span class="sourceLineNo">813</span>   * modifications necessary if we want to avoid doing it.<a name="line.813"></a>
-<span class="sourceLineNo">814</span>   */<a name="line.814"></a>
-<span class="sourceLineNo">815</span>  private void createTable(TableName tableName, Path hfofDir, AsyncAdmin admin) throws IOException {<a name="line.815"></a>
-<span class="sourceLineNo">816</span>    final FileSystem fs = hfofDir.getFileSystem(getConf());<a name="line.816"></a>
-<span class="sourceLineNo">817</span><a name="line.817"></a>
-<span class="sourceLineNo">818</span>    // Add column families<a name="line.818"></a>
-<span class="sourceLineNo">819</span>    // Build a set of keys<a name="line.819"></a>
-<span class="sourceLineNo">820</span>    List&lt;ColumnFamilyDescriptorBuilder&gt; familyBuilders = new ArrayList&lt;&gt;();<a name="line.820"></a>
-<span class="sourceLineNo">821</span>    SortedMap&lt;byte[], Integer&gt; map = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.821"></a>
-<span class="sourceLineNo">822</span>    visitBulkHFiles(fs, hfofDir, new BulkHFileVisitor&lt;ColumnFamilyDescriptorBuilder&gt;() {<a name="line.822"></a>
-<span class="sourceLineNo">823</span>      @Override<a name="line.823"></a>
-<span class="sourceLineNo">824</span>      public ColumnFamilyDescriptorBuilder bulkFamily(byte[] familyName) {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>        ColumnFamilyDescriptorBuilder builder =<a name="line.825"></a>
-<span class="sourceLineNo">826</span>          ColumnFamilyDescriptorBuilder.newBuilder(familyName);<a name="line.826"></a>
-<span class="sourceLineNo">827</span>        familyBuilders.add(builder);<a name="line.827"></a>
-<span class="sourceLineNo">828</span>        return builder;<a name="line.828"></a>
-<span class="sourceLineNo">829</span>      }<a name="line.829"></a>
-<span class="sourceLineNo">830</span><a name="line.830"></a>
-<span class="sourceLineNo">831</span>      @Override<a name="line.831"></a>
-<span class="sourceLineNo">832</span>      public void bulkHFile(ColumnFamilyDescriptorBuilder builder, FileStatus hfileStatus)<a name="line.832"></a>
-<span class="sourceLineNo">833</span>          throws IOException {<a name="line.833"></a>
-<span class="sourceLineNo">834</span>        Path hfile = hfileStatus.getPath();<a name="line.834"></a>
-<span class="sourceLineNo">835</span>        try (HFile.Reader reader =<a name="line.835"></a>
-<span class="sourceLineNo">836</span>          HFile.createReader(fs, hfile, CacheConfig.DISABLED, true, getConf())) {<a name="line.836"></a>
-<span class="sourceLineNo">837</span>          if (builder.getCompressionType() != reader.getFileContext().getCompression()) {<a name="line.837"></a>
-<span class="sourceLineNo">838</span>            builder.setCompressionType(reader.getFileContext().getCompression());<a name="line.838"></a>
-<span class="sourceLineNo">839</span>            LOG.info("Setting compression " + reader.getFileContext().getCompression().name() +<a name="line.839"></a>
-<span class="sourceLineNo">840</span>              " for family " + builder.getNameAsString());<a name="line.840"></a>
-<span class="sourceLineNo">841</span>          }<a name="line.841"></a>
-<span class="sourceLineNo">842</span>          byte[] first = reader.getFirstRowKey().get();<a name="line.842"></a>
-<span class="sourceLineNo">843</span>          byte[] last = reader.getLastRowKey().get();<a name="line.843"></a>
-<span class="sourceLineNo">844</span><a name="line.844"></a>
-<span class="sourceLineNo">845</span>          LOG.info("Trying to figure out region boundaries hfile=" + hfile + " first=" +<a name="line.845"></a>
-<span class="sourceLineNo">846</span>            Bytes.toStringBinary(first) + " last=" + Bytes.toStringBinary(last));<a name="line.846"></a>
-<span class="sourceLineNo">847</span><a name="line.847"></a>
-<span class="sourceLineNo">848</span>          // To eventually infer start key-end key boundaries<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          Integer value = map.getOrDefault(first, 0);<a name="line.849"></a>
-<span class="sourceLineNo">850</span>          map.put(first, value + 1);<a name="line.850"></a>
+<span class="sourceLineNo">599</span>    // If the current item is already the result of previous splits,<a name="line.599"></a>
+<span class="sourceLineNo">600</span>    // we don't need it anymore. Clean up to save space.<a name="line.600"></a>
+<span class="sourceLineNo">601</span>    // It is not part of the original input files.<a name="line.601"></a>
+<span class="sourceLineNo">602</span>    try {<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      if (tmpDir.getName().equals(TMP_DIR)) {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>        fs.delete(hfilePath, false);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    } catch (IOException e) {<a name="line.606"></a>
+<span class="sourceLineNo">607</span>      LOG.warn("Unable to delete temporary split file " + hfilePath);<a name="line.607"></a>
+<span class="sourceLineNo">608</span>    }<a name="line.608"></a>
+<span class="sourceLineNo">609</span>    LOG.info("Successfully split into new HFiles " + botOut + " and " + topOut);<a name="line.609"></a>
+<span class="sourceLineNo">610</span>    return lqis;<a name="line.610"></a>
+<span class="sourceLineNo">611</span>  }<a name="line.611"></a>
+<span class="sourceLineNo">612</span><a name="line.612"></a>
+<span class="sourceLineNo">613</span>  /**<a name="line.613"></a>
+<span class="sourceLineNo">614</span>   * Attempt to assign the given load queue item into its target region group. If the hfile boundary<a name="line.614"></a>
+<span class="sourceLineNo">615</span>   * no longer fits into a region, physically splits the hfile such that the new bottom half will<a name="line.615"></a>
+<span class="sourceLineNo">616</span>   * fit and returns the list of LQI's corresponding to the resultant hfiles.<a name="line.616"></a>
+<span class="sourceLineNo">617</span>   * &lt;p/&gt;<a name="line.617"></a>
+<span class="sourceLineNo">618</span>   * protected for testing<a name="line.618"></a>
+<span class="sourceLineNo">619</span>   * @throws IOException if an IO failure is encountered<a name="line.619"></a>
+<span class="sourceLineNo">620</span>   */<a name="line.620"></a>
+<span class="sourceLineNo">621</span>  @VisibleForTesting<a name="line.621"></a>
+<span class="sourceLineNo">622</span>  protected Pair&lt;List&lt;LoadQueueItem&gt;, String&gt; groupOrSplit(AsyncClusterConnection conn,<a name="line.622"></a>
+<span class="sourceLineNo">623</span>      TableName tableName, Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups, LoadQueueItem item,<a name="line.623"></a>
+<span class="sourceLineNo">624</span>      List&lt;Pair&lt;byte[], byte[]&gt;&gt; startEndKeys) throws IOException {<a name="line.624"></a>
+<span class="sourceLineNo">625</span>    Path hfilePath = item.getFilePath();<a name="line.625"></a>
+<span class="sourceLineNo">626</span>    Optional&lt;byte[]&gt; first, last;<a name="line.626"></a>
+<span class="sourceLineNo">627</span>    try (HFile.Reader hfr = HFile.createReader(hfilePath.getFileSystem(getConf()), hfilePath,<a name="line.627"></a>
+<span class="sourceLineNo">628</span>      CacheConfig.DISABLED, true, getConf())) {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>      first = hfr.getFirstRowKey();<a name="line.629"></a>
+<span class="sourceLineNo">630</span>      last = hfr.getLastRowKey();<a name="line.630"></a>
+<span class="sourceLineNo">631</span>    } catch (FileNotFoundException fnfe) {<a name="line.631"></a>
+<span class="sourceLineNo">632</span>      LOG.debug("encountered", fnfe);<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      return new Pair&lt;&gt;(null, hfilePath.getName());<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
+<span class="sourceLineNo">635</span><a name="line.635"></a>
+<span class="sourceLineNo">636</span>    LOG.info("Trying to load hfile=" + hfilePath + " first=" + first.map(Bytes::toStringBinary) +<a name="line.636"></a>
+<span class="sourceLineNo">637</span>      " last=" + last.map(Bytes::toStringBinary));<a name="line.637"></a>
+<span class="sourceLineNo">638</span>    if (!first.isPresent() || !last.isPresent()) {<a name="line.638"></a>
+<span class="sourceLineNo">639</span>      assert !first.isPresent() &amp;&amp; !last.isPresent();<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      // TODO what if this is due to a bad HFile?<a name="line.640"></a>
+<span class="sourceLineNo">641</span>      LOG.info("hfile " + hfilePath + " has no entries, skipping");<a name="line.641"></a>
+<span class="sourceLineNo">642</span>      return null;<a name="line.642"></a>
+<span class="sourceLineNo">643</span>    }<a name="line.643"></a>
+<span class="sourceLineNo">644</span>    if (Bytes.compareTo(first.get(), last.get()) &gt; 0) {<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      throw new IllegalArgumentException("Invalid range: " + Bytes.toStringBinary(first.get()) +<a name="line.645"></a>
+<span class="sourceLineNo">646</span>        " &gt; " + Bytes.toStringBinary(last.get()));<a name="line.646"></a>
+<span class="sourceLineNo">647</span>    }<a name="line.647"></a>
+<span class="sourceLineNo">648</span>    int idx =<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      Collections.binarySearch(startEndKeys, Pair.newPair(first.get(), HConstants.EMPTY_END_ROW),<a name="line.649"></a>
+<span class="sourceLineNo">650</span>        (p1, p2) -&gt; Bytes.compareTo(p1.getFirst(), p2.getFirst()));<a name="line.650"></a>
+<span class="sourceLineNo">651</span>    if (idx &lt; 0) {<a name="line.651"></a>
+<span class="sourceLineNo">652</span>      // not on boundary, returns -(insertion index). Calculate region it<a name="line.652"></a>
+<span class="sourceLineNo">653</span>      // would be in.<a name="line.653"></a>
+<span class="sourceLineNo">654</span>      idx = -(idx + 1) - 1;<a name="line.654"></a>
+<span class="sourceLineNo">655</span>    }<a name="line.655"></a>
+<span class="sourceLineNo">656</span>    int indexForCallable = idx;<a name="line.656"></a>
+<span class="sourceLineNo">657</span><a name="line.657"></a>
+<span class="sourceLineNo">658</span>    /*<a name="line.658"></a>
+<span class="sourceLineNo">659</span>     * we can consider there is a region hole in following conditions. 1) if idx &lt; 0,then first<a name="line.659"></a>
+<span class="sourceLineNo">660</span>     * region info is lost. 2) if the endkey of a region is not equal to the startkey of the next<a name="line.660"></a>
+<span class="sourceLineNo">661</span>     * region. 3) if the endkey of the last region is not empty.<a name="line.661"></a>
+<span class="sourceLineNo">662</span>     */<a name="line.662"></a>
+<span class="sourceLineNo">663</span>    if (indexForCallable &lt; 0) {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>      throw new IOException("The first region info for table " + tableName +<a name="line.664"></a>
+<span class="sourceLineNo">665</span>        " can't be found in hbase:meta.Please use hbck tool to fix it first.");<a name="line.665"></a>
+<span class="sourceLineNo">666</span>    } else if ((indexForCallable == startEndKeys.size() - 1) &amp;&amp;<a name="line.666"></a>
+<span class="sourceLineNo">667</span>      !Bytes.equals(startEndKeys.get(indexForCallable).getSecond(), HConstants.EMPTY_BYTE_ARRAY)) {<a name="line.667"></a>
+<span class="sourceLineNo">668</span>      throw new IOException("The last region info for table " + tableName +<a name="line.668"></a>
+<span class="sourceLineNo">669</span>        " can't be found in hbase:meta.Please use hbck tool to fix it first.");<a name="line.669"></a>
+<span class="sourceLineNo">670</span>    } else if (indexForCallable + 1 &lt; startEndKeys.size() &amp;&amp;<a name="line.670"></a>
+<span class="sourceLineNo">671</span>      !(Bytes.compareTo(startEndKeys.get(indexForCallable).getSecond(),<a name="line.671"></a>
+<span class="sourceLineNo">672</span>        startEndKeys.get(indexForCallable + 1).getFirst()) == 0)) {<a name="line.672"></a>
+<span class="sourceLineNo">673</span>      throw new IOException("The endkey of one region for table " + tableName +<a name="line.673"></a>
+<span class="sourceLineNo">674</span>        " is not equal to the startkey of the next region in hbase:meta." +<a name="line.674"></a>
+<span class="sourceLineNo">675</span>        "Please use hbck tool to fix it first.");<a name="line.675"></a>
+<span class="sourceLineNo">676</span>    }<a name="line.676"></a>
+<span class="sourceLineNo">677</span><a name="line.677"></a>
+<span class="sourceLineNo">678</span>    boolean lastKeyInRange = Bytes.compareTo(last.get(), startEndKeys.get(idx).getSecond()) &lt; 0 ||<a name="line.678"></a>
+<span class="sourceLineNo">679</span>      Bytes.equals(startEndKeys.get(idx).getSecond(), HConstants.EMPTY_BYTE_ARRAY);<a name="line.679"></a>
+<span class="sourceLineNo">680</span>    if (!lastKeyInRange) {<a name="line.680"></a>
+<span class="sourceLineNo">681</span>      Pair&lt;byte[], byte[]&gt; startEndKey = startEndKeys.get(indexForCallable);<a name="line.681"></a>
+<span class="sourceLineNo">682</span>      List&lt;LoadQueueItem&gt; lqis =<a name="line.682"></a>
+<span class="sourceLineNo">683</span>        splitStoreFile(item, FutureUtils.get(conn.getAdmin().getDescriptor(tableName)),<a name="line.683"></a>
+<span class="sourceLineNo">684</span>            startEndKey.getSecond());<a name="line.684"></a>
+<span class="sourceLineNo">685</span>      return new Pair&lt;&gt;(lqis, null);<a name="line.685"></a>
+<span class="sourceLineNo">686</span>    }<a name="line.686"></a>
+<span class="sourceLineNo">687</span><a name="line.687"></a>
+<span class="sourceLineNo">688</span>    // group regions.<a name="line.688"></a>
+<span class="sourceLineNo">689</span>    regionGroups.put(ByteBuffer.wrap(startEndKeys.get(idx).getFirst()), item);<a name="line.689"></a>
+<span class="sourceLineNo">690</span>    return null;<a name="line.690"></a>
+<span class="sourceLineNo">691</span>  }<a name="line.691"></a>
+<span class="sourceLineNo">692</span><a name="line.692"></a>
+<span class="sourceLineNo">693</span>  /**<a name="line.693"></a>
+<span class="sourceLineNo">694</span>   * Split a storefile into a top and bottom half, maintaining the metadata, recreating bloom<a name="line.694"></a>
+<span class="sourceLineNo">695</span>   * filters, etc.<a name="line.695"></a>
+<span class="sourceLineNo">696</span>   */<a name="line.696"></a>
+<span class="sourceLineNo">697</span>  @VisibleForTesting<a name="line.697"></a>
+<span class="sourceLineNo">698</span>  static void splitStoreFile(Configuration conf, Path inFile, ColumnFamilyDescriptor familyDesc,<a name="line.698"></a>
+<span class="sourceLineNo">699</span>      byte[] splitKey, Path bottomOut, Path topOut) throws IOException {<a name="line.699"></a>
+<span class="sourceLineNo">700</span>    // Open reader with no block cache, and not in-memory<a name="line.700"></a>
+<span class="sourceLineNo">701</span>    Reference topReference = Reference.createTopReference(splitKey);<a name="line.701"></a>
+<span class="sourceLineNo">702</span>    Reference bottomReference = Reference.createBottomReference(splitKey);<a name="line.702"></a>
+<span class="sourceLineNo">703</span><a name="line.703"></a>
+<span class="sourceLineNo">704</span>    copyHFileHalf(conf, inFile, topOut, topReference, familyDesc);<a name="line.704"></a>
+<span class="sourceLineNo">705</span>    copyHFileHalf(conf, inFile, bottomOut, bottomReference, familyDesc);<a name="line.705"></a>
+<span class="sourceLineNo">706</span>  }<a name="line.706"></a>
+<span class="sourceLineNo">707</span><a name="line.707"></a>
+<span class="sourceLineNo">708</span>  /**<a name="line.708"></a>
+<span class="sourceLineNo">709</span>   * Copy half of an HFile into a new HFile.<a name="line.709"></a>
+<span class="sourceLineNo">710</span>   */<a name="line.710"></a>
+<span class="sourceLineNo">711</span>  private static void copyHFileHalf(Configuration conf, Path inFile, Path outFile,<a name="line.711"></a>
+<span class="sourceLineNo">712</span>      Reference reference, ColumnFamilyDescriptor familyDescriptor) throws IOException {<a name="line.712"></a>
+<span class="sourceLineNo">713</span>    FileSystem fs = inFile.getFileSystem(conf);<a name="line.713"></a>
+<span class="sourceLineNo">714</span>    CacheConfig cacheConf = CacheConfig.DISABLED;<a name="line.714"></a>
+<span class="sourceLineNo">715</span>    HalfStoreFileReader halfReader = null;<a name="line.715"></a>
+<span class="sourceLineNo">716</span>    StoreFileWriter halfWriter = null;<a name="line.716"></a>
+<span class="sourceLineNo">717</span>    try {<a name="line.717"></a>
+<span class="sourceLineNo">718</span>      ReaderContext context = new ReaderContextBuilder()<a name="line.718"></a>
+<span class="sourceLineNo">719</span>          .withFileSystemAndPath(fs, inFile).build();<a name="line.719"></a>
+<span class="sourceLineNo">720</span>      HFileInfo hfile = new HFileInfo(context, conf);<a name="line.720"></a>
+<span class="sourceLineNo">721</span>      halfReader = new HalfStoreFileReader(context, hfile, cacheConf, reference,<a name="line.721"></a>
+<span class="sourceLineNo">722</span>        new AtomicInteger(0), conf);<a name="line.722"></a>
+<span class="sourceLineNo">723</span>      hfile.initMetaAndIndex(halfReader.getHFileReader());<a name="line.723"></a>
+<span class="sourceLineNo">724</span>      Map&lt;byte[], byte[]&gt; fileInfo = halfReader.loadFileInfo();<a name="line.724"></a>
+<span class="sourceLineNo">725</span><a name="line.725"></a>
+<span class="sourceLineNo">726</span>      int blocksize = familyDescriptor.getBlocksize();<a name="line.726"></a>
+<span class="sourceLineNo">727</span>      Algorithm compression = familyDescriptor.getCompressionType();<a name="line.727"></a>
+<span class="sourceLineNo">728</span>      BloomType bloomFilterType = familyDescriptor.getBloomFilterType();<a name="line.728"></a>
+<span class="sourceLineNo">729</span>      HFileContext hFileContext = new HFileContextBuilder().withCompression(compression)<a name="line.729"></a>
+<span class="sourceLineNo">730</span>        .withChecksumType(HStore.getChecksumType(conf))<a name="line.730"></a>
+<span class="sourceLineNo">731</span>        .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf)).withBlockSize(blocksize)<a name="line.731"></a>
+<span class="sourceLineNo">732</span>        .withDataBlockEncoding(familyDescriptor.getDataBlockEncoding()).withIncludesTags(true)<a name="line.732"></a>
+<span class="sourceLineNo">733</span>        .build();<a name="line.733"></a>
+<span class="sourceLineNo">734</span>      halfWriter = new StoreFileWriter.Builder(conf, cacheConf, fs).withFilePath(outFile)<a name="line.734"></a>
+<span class="sourceLineNo">735</span>        .withBloomType(bloomFilterType).withFileContext(hFileContext).build();<a name="line.735"></a>
+<span class="sourceLineNo">736</span>      HFileScanner scanner = halfReader.getScanner(false, false, false);<a name="line.736"></a>
+<span class="sourceLineNo">737</span>      scanner.seekTo();<a name="line.737"></a>
+<span class="sourceLineNo">738</span>      do {<a name="line.738"></a>
+<span class="sourceLineNo">739</span>        halfWriter.append(scanner.getCell());<a name="line.739"></a>
+<span class="sourceLineNo">740</span>      } while (scanner.next());<a name="line.740"></a>
+<span class="sourceLineNo">741</span><a name="line.741"></a>
+<span class="sourceLineNo">742</span>      for (Map.Entry&lt;byte[], byte[]&gt; entry : fileInfo.entrySet()) {<a name="line.742"></a>
+<span class="sourceLineNo">743</span>        if (shouldCopyHFileMetaKey(entry.getKey())) {<a name="line.743"></a>
+<span class="sourceLineNo">744</span>          halfWriter.appendFileInfo(entry.getKey(), entry.getValue());<a name="line.744"></a>
+<span class="sourceLineNo">745</span>        }<a name="line.745"></a>
+<span class="sourceLineNo">746</span>      }<a name="line.746"></a>
+<span class="sourceLineNo">747</span>    } finally {<a name="line.747"></a>
+<span class="sourceLineNo">748</span>      if (halfReader != null) {<a name="line.748"></a>
+<span class="sourceLineNo">749</span>        try {<a name="line.749"></a>
+<span class="sourceLineNo">750</span>          halfReader.close(cacheConf.shouldEvictOnClose());<a name="line.750"></a>
+<span class="sourceLineNo">751</span>        } catch (IOException e) {<a name="line.751"></a>
+<span class="sourceLineNo">752</span>          LOG.warn("failed to close hfile reader for " + inFile, e);<a name="line.752"></a>
+<span class="sourceLineNo">753</span>        }<a name="line.753"></a>
+<span class="sourceLineNo">754</span>      }<a name="line.754"></a>
+<span class="sourceLineNo">755</span>      if (halfWriter != null) {<a name="line.755"></a>
+<span class="sourceLineNo">756</span>        halfWriter.close();<a name="line.756"></a>
+<span class="sourceLineNo">757</span>      }<a name="line.757"></a>
+<span class="sourceLineNo">758</span>    }<a name="line.758"></a>
+<span class="sourceLineNo">759</span>  }<a name="line.759"></a>
+<span class="sourceLineNo">760</span><a name="line.760"></a>
+<span class="sourceLineNo">761</span>  /**<a name="line.761"></a>
+<span class="sourceLineNo">762</span>   * Infers region boundaries for a new table.<a name="line.762"></a>
+<span class="sourceLineNo">763</span>   * &lt;p/&gt;<a name="line.763"></a>
+<span class="sourceLineNo">764</span>   * Parameter: &lt;br/&gt;<a name="line.764"></a>
+<span class="sourceLineNo">765</span>   * bdryMap is a map between keys to an integer belonging to {+1, -1}<a name="line.765"></a>
+<span class="sourceLineNo">766</span>   * &lt;ul&gt;<a name="line.766"></a>
+<span class="sourceLineNo">767</span>   * &lt;li&gt;If a key is a start key of a file, then it maps to +1&lt;/li&gt;<a name="line.767"></a>
+<span class="sourceLineNo">768</span>   * &lt;li&gt;If a key is an end key of a file, then it maps to -1&lt;/li&gt;<a name="line.768"></a>
+<span class="sourceLineNo">769</span>   * &lt;/ul&gt;<a name="line.769"></a>
+<span class="sourceLineNo">770</span>   * &lt;p&gt;<a name="line.770"></a>
+<span class="sourceLineNo">771</span>   * Algo:&lt;br/&gt;<a name="line.771"></a>
+<span class="sourceLineNo">772</span>   * &lt;ol&gt;<a name="line.772"></a>
+<span class="sourceLineNo">773</span>   * &lt;li&gt;Poll on the keys in order:<a name="line.773"></a>
+<span class="sourceLineNo">774</span>   * &lt;ol type="a"&gt;<a name="line.774"></a>
+<span class="sourceLineNo">775</span>   * &lt;li&gt;Keep adding the mapped values to these keys (runningSum)&lt;/li&gt;<a name="line.775"></a>
+<span class="sourceLineNo">776</span>   * &lt;li&gt;Each time runningSum reaches 0, add the start Key from when the runningSum had started to a<a name="line.776"></a>
+<span class="sourceLineNo">777</span>   * boundary list.&lt;/li&gt;<a name="line.777"></a>
+<span class="sourceLineNo">778</span>   * &lt;/ol&gt;<a name="line.778"></a>
+<span class="sourceLineNo">779</span>   * &lt;/li&gt;<a name="line.779"></a>
+<span class="sourceLineNo">780</span>   * &lt;li&gt;Return the boundary list.&lt;/li&gt;<a name="line.780"></a>
+<span class="sourceLineNo">781</span>   * &lt;/ol&gt;<a name="line.781"></a>
+<span class="sourceLineNo">782</span>   */<a name="line.782"></a>
+<span class="sourceLineNo">783</span>  public static byte[][] inferBoundaries(SortedMap&lt;byte[], Integer&gt; bdryMap) {<a name="line.783"></a>
+<span class="sourceLineNo">784</span>    List&lt;byte[]&gt; keysArray = new ArrayList&lt;&gt;();<a name="line.784"></a>
+<span class="sourceLineNo">785</span>    int runningValue = 0;<a name="line.785"></a>
+<span class="sourceLineNo">786</span>    byte[] currStartKey = null;<a name="line.786"></a>
+<span class="sourceLineNo">787</span>    boolean firstBoundary = true;<a name="line.787"></a>
+<span class="sourceLineNo">788</span><a name="line.788"></a>
+<span class="sourceLineNo">789</span>    for (Map.Entry&lt;byte[], Integer&gt; item : bdryMap.entrySet()) {<a name="line.789"></a>
+<span class="sourceLineNo">790</span>      if (runningValue == 0) {<a name="line.790"></a>
+<span class="sourceLineNo">791</span>        currStartKey = item.getKey();<a name="line.791"></a>
+<span class="sourceLineNo">792</span>      }<a name="line.792"></a>
+<span class="sourceLineNo">793</span>      runningValue += item.getValue();<a name="line.793"></a>
+<span class="sourceLineNo">794</span>      if (runningValue == 0) {<a name="line.794"></a>
+<span class="sourceLineNo">795</span>        if (!firstBoundary) {<a name="line.795"></a>
+<span class="sourceLineNo">796</span>          keysArray.add(currStartKey);<a name="line.796"></a>
+<span class="sourceLineNo">797</span>        }<a name="line.797"></a>
+<span class="sourceLineNo">798</span>        firstBoundary = false;<a name="line.798"></a>
+<span class="sourceLineNo">799</span>      }<a name="line.799"></a>
+<span class="sourceLineNo">800</span>    }<a name="line.800"></a>
+<span class="sourceLineNo">801</span><a name="line.801"></a>
+<span class="sourceLineNo">802</span>    return keysArray.toArray(new byte[0][]);<a name="line.802"></a>
+<span class="sourceLineNo">803</span>  }<a name="line.803"></a>
+<span class="sourceLineNo">804</span><a name="line.804"></a>
+<span class="sourceLineNo">805</span>  /**<a name="line.805"></a>
+<span class="sourceLineNo">806</span>   * If the table is created for the first time, then "completebulkload" reads the files twice. More<a name="line.806"></a>
+<span class="sourceLineNo">807</span>   * modifications necessary if we want to avoid doing it.<a name="line.807"></a>
+<span class="sourceLineNo">808</span>   */<a name="line.808"></a>
+<span class="sourceLineNo">809</span>  private void createTable(TableName tableName, Path hfofDir, AsyncAdmin admin) throws IOException {<a name="line.809"></a>
+<span class="sourceLineNo">810</span>    final FileSystem fs = hfofDir.getFileSystem(getConf());<a name="line.810"></a>
+<span class="sourceLineNo">811</span><a name="line.811"></a>
+<span class="sourceLineNo">812</span>    // Add column families<a name="line.812"></a>
+<span class="sourceLineNo">813</span>    // Build a set of keys<a name="line.813"></a>
+<span class="sourceLineNo">814</span>    List&lt;ColumnFamilyDescriptorBuilder&gt; familyBuilders = new ArrayList&lt;&gt;();<a name="line.814"></a>
+<span class="sourceLineNo">815</span>    SortedMap&lt;byte[], Integer&gt; map = new TreeMap&lt;&gt;(Bytes.BYTES_COMPARATOR);<a name="line.815"></a>
+<span class="sourceLineNo">816</span>    visitBulkHFiles(fs, hfofDir, new BulkHFileVisitor&lt;ColumnFamilyDescriptorBuilder&gt;() {<a name="line.816"></a>
+<span class="sourceLineNo">817</span>      @Override<a name="line.817"></a>
+<span class="sourceLineNo">818</span>      public ColumnFamilyDescriptorBuilder bulkFamily(byte[] familyName) {<a name="line.818"></a>
+<span class="sourceLineNo">819</span>        ColumnFamilyDescriptorBuilder builder =<a name="line.819"></a>
+<span class="sourceLineNo">820</span>          ColumnFamilyDescriptorBuilder.newBuilder(familyName);<a name="line.820"></a>
+<span class="sourceLineNo">821</span>        familyBuilders.add(builder);<a name="line.821"></a>
+<span class="sourceLineNo">822</span>        return builder;<a name="line.822"></a>
+<span class="sourceLineNo">823</span>      }<a name="line.823"></a>
+<span class="sourceLineNo">824</span><a name="line.824"></a>
+<span class="sourceLineNo">825</span>      @Override<a name="line.825"></a>
+<span class="sourceLineNo">826</span>      public void bulkHFile(ColumnFamilyDescriptorBuilder builder, FileStatus hfileStatus)<a name="line.826"></a>
+<span class="sourceLineNo">827</span>          throws IOException {<a name="line.827"></a>
+<span class="sourceLineNo">828</span>        Path hfile = hfileStatus.getPath();<a name="line.828"></a>
+<span class="sourceLineNo">829</span>        try (HFile.Reader reader =<a name="line.829"></a>
+<span class="sourceLineNo">830</span>          HFile.createReader(fs, hfile, CacheConfig.DISABLED, true, getConf())) {<a name="line.830"></a>
+<span class="sourceLineNo">831</span>          if (builder.getCompressionType() != reader.getFileContext().getCompression()) {<a name="line.831"></a>
+<span class="sourceLineNo">832</span>            builder.setCompressionType(reader.getFileContext().getCompression());<a name="line.832"></a>
+<span class="sourceLineNo">833</span>            LOG.info("Setting compression " + reader.getFileContext().getCompression().name() +<a name="line.833"></a>
+<span class="sourceLineNo">834</span>              " for family " + builder.getNameAsString());<a name="line.834"></a>
+<span class="sourceLineNo">835</span>          }<a name="line.835"></a>
+<span class="sourceLineNo">836</span>          byte[] first = reader.getFirstRowKey().get();<a name="line.836"></a>
+<span class="sourceLineNo">837</span>          byte[] last = reader.getLastRowKey().get();<a name="line.837"></a>
+<span class="sourceLineNo">838</span><a name="line.838"></a>
+<span class="sourceLineNo">839</span>          LOG.info("Trying to figure out region boundaries hfile=" + hfile + " first=" +<a name="line.839"></a>
+<span class="sourceLineNo">840</span>            Bytes.toStringBinary(first) + " last=" + Bytes.toStringBinary(last));<a name="line.840"></a>
+<span class="sourceLineNo">841</span><a name="line.841"></a>
+<span class="sourceLineNo">842</span>          // To eventually infer start key-end key boundaries<a name="line.842"></a>
+<span class="sourceLineNo">843</span>          Integer value = map.getOrDefault(first, 0);<a name="line.843"></a>
+<span class="sourceLineNo">844</span>          map.put(first, value + 1);<a name="line.844"></a>
+<span class="sourceLineNo">845</span><a name="line.845"></a>
+<span class="sourceLineNo">846</span>          value = map.containsKey(last) ? map.get(last) : 0;<a name="line.846"></a>
+<span class="sourceLineNo">847</span>          map.put(last, value - 1);<a name="line.847"></a>
+<span class="sourceLineNo">848</span>        }<a name="line.848"></a>
+<span class="sourceLineNo">849</span>      }<a name="line.849"></a>
+<span class="sourceLineNo">850</span>    }, true);<a name="line.850"></a>
 <span class="sourceLineNo">851</span><a name="line.851"></a>
-<span class="sourceLineNo">852</span>          value = map.containsKey(last) ? map.get(last) : 0;<a name="line.852"></a>
-<span class="sourceLineNo">853</span>          map.put(last, value - 1);<a name="line.853"></a>
-<span class="sourceLineNo">854</span>        }<a name="line.854"></a>
-<span class="sourceLineNo">855</span>      }<a name="line.855"></a>
-<span class="sourceLineNo">856</span>    }, true);<a name="line.856"></a>
+<span class="sourceLineNo">852</span>    byte[][] keys = inferBoundaries(map);<a name="line.852"></a>
+<span class="sourceLineNo">853</span>    TableDescriptorBuilder tdBuilder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.853"></a>
+<span class="sourceLineNo">854</span>    familyBuilders.stream().map(ColumnFamilyDescriptorBuilder::build)<a name="line.854"></a>
+<span class="sourceLineNo">855</span>      .forEachOrdered(tdBuilder::setColumnFamily);<a name="line.855"></a>
+<span class="sourceLineNo">856</span>    FutureUtils.get(admin.createTable(tdBuilder.build(), keys));<a name="line.856"></a>
 <span class="sourceLineNo">857</span><a name="line.857"></a>
-<span class="sourceLineNo">858</span>    byte[][] keys = inferBoundaries(map);<a name="line.858"></a>
-<span class="sourceLineNo">859</span>    TableDescriptorBuilder tdBuilder = TableDescriptorBuilder.newBuilder(tableName);<a name="line.859"></a>
-<span class="sourceLineNo">860</span>    familyBuilders.stream().map(ColumnFamilyDescriptorBuilder::build)<a name="line.860"></a>
-<span class="sourceLineNo">861</span>      .forEachOrdered(tdBuilder::setColumnFamily);<a name="line.861"></a>
-<span class="sourceLineNo">862</span>    FutureUtils.get(admin.createTable(tdBuilder.build(), keys));<a name="line.862"></a>
-<span class="sourceLineNo">863</span><a name="line.863"></a>
-<span class="sourceLineNo">864</span>    LOG.info("Table " + tableName + " is available!!");<a name="line.864"></a>
-<span class="sourceLineNo">865</span>  }<a name="line.865"></a>
-<span class="sourceLineNo">866</span><a name="line.866"></a>
-<span class="sourceLineNo">867</span>  private Map&lt;LoadQueueItem, ByteBuffer&gt; performBulkLoad(AsyncClusterConnection conn,<a name="line.867"></a>
-<span class="sourceLineNo">868</span>      TableName tableName, Deque&lt;LoadQueueItem&gt; queue, ExecutorService pool, boolean copyFile)<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      throws IOException {<a name="line.869"></a>
-<span class="sourceLineNo">870</span>    int count = 0;<a name="line.870"></a>
-<span class="sourceLineNo">871</span><a name="line.871"></a>
-<span class="sourceLineNo">872</span>    fsDelegationToken.acquireDelegationToken(queue.peek().getFilePath().getFileSystem(getConf()));<a name="line.872"></a>
-<span class="sourceLineNo">873</span>    bulkToken = FutureUtils.get(conn.prepareBulkLoad(tableName));<a name="line.873"></a>
-<span class="sourceLineNo">874</span>    Pair&lt;Multimap&lt;ByteBuffer, LoadQueueItem&gt;, Set&lt;String&gt;&gt; pair = null;<a name="line.874"></a>
-<span class="sourceLineNo">875</span><a name="line.875"></a>
-<span class="sourceLineNo">876</span>    Map&lt;LoadQueueItem, ByteBuffer&gt; item2RegionMap = new HashMap&lt;&gt;();<a name="line.876"></a>
-<span class="sourceLineNo">877</span>    // Assumes that region splits can happen while this occurs.<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    while (!queue.isEmpty()) {<a name="line.878"></a>
-<span class="sourceLineNo">879</span>      // need to reload split keys each iteration.<a name="line.879"></a>
-<span class="sourceLineNo">880</span>      final List&lt;Pair&lt;byte[], byte[]&gt;&gt; startEndKeys =<a name="line.880"></a>
-<span class="sourceLineNo">881</span>        FutureUtils.get(conn.getRegionLocator(tableName).getStartEndKeys());<a name="line.881"></a>
-<span class="sourceLineNo">882</span>      if (count != 0) {<a name="line.882"></a>
-<span class="sourceLineNo">883</span>        LOG.info("Split occurred while grouping HFiles, retry attempt " + count + " with " +<a name="line.883"></a>
-<span class="sourceLineNo">884</span>          queue.size() + " files remaining to group or split");<a name="line.884"></a>
-<span class="sourceLineNo">885</span>      }<a name="line.885"></a>
-<span class="sourceLineNo">886</span><a name="line.886"></a>
-<span class="sourceLineNo">887</span>      int maxRetries = getConf().getInt(HConstants.BULKLOAD_MAX_RETRIES_NUMBER, 10);<a name="line.887"></a>
-<span class="sourceLineNo">888</span>      maxRetries = Math.max(maxRetries, startEndKeys.size() + 1);<a name="line.888"></a>
-<span class="sourceLineNo">889</span>      if (maxRetries != 0 &amp;&amp; count &gt;= maxRetries) {<a name="line.889"></a>
-<span class="sourceLineNo">890</span>        throw new IOException(<a name="line.890"></a>
-<span class="sourceLineNo">891</span>          "Retry attempted " + count + " times without completing, bailing out");<a name="line.891"></a>
-<span class="sourceLineNo">892</span>      }<a name="line.892"></a>
-<span class="sourceLineNo">893</span>      count++;<a name="line.893"></a>
-<span class="sourceLineNo">894</span><a name="line.894"></a>
-<span class="sourceLineNo">895</span>      // Using ByteBuffer for byte[] equality semantics<a name="line.895"></a>
-<span class="sourceLineNo">896</span>      pair = groupOrSplitPhase(conn, tableName, pool, queue, startEndKeys);<a name="line.896"></a>
-<span class="sourceLineNo">897</span>      Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups = pair.getFirst();<a name="line.897"></a>
+<span class="sourceLineNo">858</span>    LOG.info("Table " + tableName + " is available!!");<a name="line.858"></a>
+<span class="sourceLineNo">859</span>  }<a name="line.859"></a>
+<span class="sourceLineNo">860</span><a name="line.860"></a>
+<span class="sourceLineNo">861</span>  private Map&lt;LoadQueueItem, ByteBuffer&gt; performBulkLoad(AsyncClusterConnection conn,<a name="line.861"></a>
+<span class="sourceLineNo">862</span>      TableName tableName, Deque&lt;LoadQueueItem&gt; queue, ExecutorService pool, boolean copyFile)<a name="line.862"></a>
+<span class="sourceLineNo">863</span>      throws IOException {<a name="line.863"></a>
+<span class="sourceLineNo">864</span>    int count = 0;<a name="line.864"></a>
+<span class="sourceLineNo">865</span><a name="line.865"></a>
+<span class="sourceLineNo">866</span>    fsDelegationToken.acquireDelegationToken(queue.peek().getFilePath().getFileSystem(getConf()));<a name="line.866"></a>
+<span class="sourceLineNo">867</span>    bulkToken = FutureUtils.get(conn.prepareBulkLoad(tableName));<a name="line.867"></a>
+<span class="sourceLineNo">868</span>    Pair&lt;Multimap&lt;ByteBuffer, LoadQueueItem&gt;, Set&lt;String&gt;&gt; pair = null;<a name="line.868"></a>
+<span class="sourceLineNo">869</span><a name="line.869"></a>
+<span class="sourceLineNo">870</span>    Map&lt;LoadQueueItem, ByteBuffer&gt; item2RegionMap = new HashMap&lt;&gt;();<a name="line.870"></a>
+<span class="sourceLineNo">871</span>    // Assumes that region splits can happen while this occurs.<a name="line.871"></a>
+<span class="sourceLineNo">872</span>    while (!queue.isEmpty()) {<a name="line.872"></a>
+<span class="sourceLineNo">873</span>      // need to reload split keys each iteration.<a name="line.873"></a>
+<span class="sourceLineNo">874</span>      final List&lt;Pair&lt;byte[], byte[]&gt;&gt; startEndKeys =<a name="line.874"></a>
+<span class="sourceLineNo">875</span>        FutureUtils.get(conn.getRegionLocator(tableName).getStartEndKeys());<a name="line.875"></a>
+<span class="sourceLineNo">876</span>      if (count != 0) {<a name="line.876"></a>
+<span class="sourceLineNo">877</span>        LOG.info("Split occurred while grouping HFiles, retry attempt " + count + " with " +<a name="line.877"></a>
+<span class="sourceLineNo">878</span>          queue.size() + " files remaining to group or split");<a name="line.878"></a>
+<span class="sourceLineNo">879</span>      }<a name="line.879"></a>
+<span class="sourceLineNo">880</span><a name="line.880"></a>
+<span class="sourceLineNo">881</span>      int maxRetries = getConf().getInt(HConstants.BULKLOAD_MAX_RETRIES_NUMBER, 10);<a name="line.881"></a>
+<span class="sourceLineNo">882</span>      maxRetries = Math.max(maxRetries, startEndKeys.size() + 1);<a name="line.882"></a>
+<span class="sourceLineNo">883</span>      if (maxRetries != 0 &amp;&amp; count &gt;= maxRetries) {<a name="line.883"></a>
+<span class="sourceLineNo">884</span>        throw new IOException(<a name="line.884"></a>
+<span class="sourceLineNo">885</span>          "Retry attempted " + count + " times without completing, bailing out");<a name="line.885"></a>
+<span class="sourceLineNo">886</span>      }<a name="line.886"></a>
+<span class="sourceLineNo">887</span>      count++;<a name="line.887"></a>
+<span class="sourceLineNo">888</span><a name="line.888"></a>
+<span class="sourceLineNo">889</span>      // Using ByteBuffer for byte[] equality semantics<a name="line.889"></a>
+<span class="sourceLineNo">890</span>      pair = groupOrSplitPhase(conn, tableName, pool, queue, startEndKeys);<a name="line.890"></a>
+<span class="sourceLineNo">891</span>      Multimap&lt;ByteBuffer, LoadQueueItem&gt; regionGroups = pair.getFirst();<a name="line.891"></a>
+<span class="sourceLineNo">892</span><a name="line.892"></a>
+<span class="sourceLineNo">893</span>      if (!checkHFilesCountPerRegionPerFamily(regionGroups)) {<a name="line.893"></a>
+<span class="sourceLineNo">894</span>        // Error is logged inside checkHFilesCountPerRegionPerFamily.<a name="line.894"></a>
+<span class="sourceLineNo">895</span>        throw new IOException("Trying to load more than " + maxFilesPerRegionPerFamily +<a name="line.895"></a>
+<span class="sourceLineNo">896</span>          " hfiles to one family of one region");<a name="line.896"></a>
+<span class="sourceLineNo">897</span>      }<a name="line.897"></a>
 <span class="sourceLineNo">898</span><a name="line.898"></a>
-<span class="sourceLineNo">899</span>      if (!checkHFilesCountPerRegionPerFamily(regionGroups)) {<a name="line.899"></a>
-<span class="sourceLineNo">900</span>        // Error is logged inside checkHFilesCountPerRegionPerFamily.<a name="line.900"></a>
-<span class="sourceLineNo">901</span>        throw new IOException("Trying to load more than " + maxFilesPerRegionPerFamily +<a name="line.901"></a>
-<span class="sourceLineNo">902</span>          " hfiles to one family of one region");<a name="line.902"></a>
-<span class="sourceLineNo">903</span>      }<a name="line.903"></a>
-<span class="sourceLineNo">904</span><a name="line.904"></a>
-<span class="sourceLineNo">905</span>      bulkLoadPhase(conn, tableName, queue, regionGroups, copyFile, item2RegionMap);<a name="line.905"></a>
-<span class="sourceLineNo">906</span><a name="line.906"></a>
-<span class="sourceLineNo">907</span>      // NOTE: The next iteration's split / group could happen in parallel to<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      // atomic bulkloads assuming that there are splits and no merges, and<a name="line.908"></a>
-<span class="sourceLineNo">909</span>      // that we can atomically pull out the groups we want to retry.<a name="line.909"></a>
-<span class="sourceLineNo">910</span>    }<a name="line.910"></a>
-<span class="sourceLineNo">911</span><a name="line.911"></a>
-<span class="sourceLineNo">912</span>    return item2RegionMap;<a name="line.912"></a>
-<span class="sourceLineNo">913</span>  }<a name="line.913"></a>
-<span class="sourceLineNo">914</span><a name="line.914"></a>
-<span class="sourceLineNo">915</span>  private void cleanup(AsyncClusterConnection conn, TableName tableName, Deque&lt;LoadQueueItem&gt; queue,<a name="line.915"></a>
-<span class="sourceLineNo">916</span>      ExecutorService pool) throws IOException {<a name="line.916"></a>
-<span class="sourceLineNo">917</span>    fsDelegationToken.releaseDelegationToken();<a name="line.917"></a>
-<span class="sourceLineNo">918</span>    if (bulkToken != null) {<a name="line.918"></a>
-<span class="sourceLineNo">919</span>      conn.cleanupBulkLoad(tableName, bulkToken);<a name="line.919"></a>
-<span class="sourceLineNo">920</span>    }<a name="line.920"></a>
-<span class="sourceLineNo">921</span>    if (pool != null) {<a name="line.921"></a>
-<span class="sourceLineNo">922</span>      pool.shutdown();<a name="line.922"></a>
-<span class="sourceLineNo">923</span>    }<a name="line.923"></a>
-<span class="sourceLineNo">924</span>    if (!queue.isEmpty()) {<a name="line.924"></a>
-<span class="sourceLineNo">925</span>      StringBuilder err = new StringBuilder();<a name="line.925"></a>
-<span class="sourceLineNo">926</span>      err.append("-------------------------------------------------\n");<a name="line.926"></a>
-<span class="sourceLineNo">927</span>      err.append("Bulk load aborted with some files not yet loaded:\n");<a name="line.927"></a>
-<span class="sourceLineNo">928</span>      err.append("-------------------------------------------------\n");<a name="line.928"></a>
-<span class="sourceLineNo">929</span>      for (LoadQueueItem q : queue) {<a name="line.929"></a>
-<span class="sourceLineNo">930</span>        err.append("  ").append(q.getFilePath()).append('\n');<a name="line.930"></a>
-<span class="sourceLineNo">931</span>      }<a name="line.931"></a>
-<span class="sourceLineNo">932</span>      LOG.error(err.toString());<a name="line.932"></a>
-<span class="sourceLineNo">933</span>    }<a name="line.933"></a>
-<span class="sourceLineNo">934</span>  }<a name="line.934"></a>
-<span class="sourceLineNo">935</span><a name="line.935"></a>
-<span class="sourceLineNo">936</span>  /**<a name="line.936"></a>
-<span class="sourceLineNo">937</span>   * Perform a bulk load of the given map of families to hfiles into the given pre-existing table.<a name="line.937"></a>
-<span class="sourceLineNo">938</span>   * This method is not threadsafe.<a name="line.938"></a>
-<span class="sourceLineNo">939</span>   * @param map map of family to List of hfiles<a name="line.939"></a>
-<span class="sourceLineNo">940</span>   * @param tableName table to load the hfiles<a name="line.940"></a>
-<span class="sourceLineNo">941</span>   * @param silence true to ignore unmatched column families<a name="line.941"></a>
-<span class="sourceLineNo">942</span>   * @param copyFile always copy hfiles if true<a name="line.942"></a>
-<span class="sourceLineNo">943</span>   */<a name="line.943"></a>
-<span class="sourceLineNo">944</span>  private Map&lt;LoadQueueItem, ByteBuffer&gt; doBulkLoad(AsyncClusterConnection conn,<a name="line.944"></a>
-<span class="sourceLineNo">945</span>      TableName tableName, Map&lt;byte[], List&lt;Path&gt;&gt; map, boolean silence, boolean copyFile)<a name="line.945"></a>
-<span class="sourceLineNo">946</span>      throws IOException {<a name="line.946"></a>
-<span class="sourceLineNo">947</span>    tableExists(conn, tableName);<a name="line.947"></a>
-<span class="sourceLineNo">948</span>    // LQI queue does not need to be threadsafe -- all operations on this queue<a name="line.948"></a>
-<span class="sourceLineNo">949</span>    // happen in this thread<a name="line.949"></a>
-<span class="sourceLineNo">950</span>    Deque&lt;LoadQueueItem&gt; queue = new ArrayDeque&lt;&gt;();<a name="line.950"></a>
-<span class="sourceLineNo">951</span>    ExecutorService pool = null;<a name="line.951"></a>
-<span class="sourceLineNo">952</span>    try {<a name="line.952"></a>
-<span class="sourceLineNo">953</span>      prepareHFileQueue(conn, tableName, map, queue, silence);<a name="line.953"></a>
-<span class="sourceLineNo">954</span>      if (queue.isEmpty()) {<a name="line.954"></a>
-<span class="sourceLineNo">955</span>        LOG.warn("Bulk load operation did not get any files to load");<a name="line.955"></a>
-<span class="sourceLineNo">956</span>        return Collections.emptyMap();<a name="line.956"></a>
-<span class="sourceLineNo">957</span>      }<a name="line.957"></a>
-<span class="sourceLineNo">958</span>      pool = createExecutorService();<a name="line.958"></a>
-<span class="sourceLineNo">959</span>      return performBulkLoad(conn, tableName, queue, pool, copyFile);<a name="line.959"></a>
-<span class="sourceLineNo">960</span>    } finally {<a name="line.960"></a>
-<span class="sourceLineNo">961</span>      cleanup(conn, tableName, queue, pool);<a name="line.961"></a>
-<span class="sourceLineNo">962</span>    }<a name="line.962"></a>
-<span class="sourceLineNo">963</span>  }<a name="line.963"></a>
-<span class="sourceLineNo">964</span><a name="line.964"></a>
-<span class="sourceLineNo">965</span>  /**<a name="line.965"></a>
-<span class="sourceLineNo">966</span>   * Perform a bulk load of the given directory into the given pre-existing table. This method is<a name="line.966"></a>
-<span class="sourceLineNo">967</span>   * not threadsafe.<a name="line.967"></a>
-<span class="sourceLineNo">968</span>   * @param tableName table to load the hfiles<a name="line.968"></a>
-<span class="sourceLineNo">969</span>   * @param hfofDir the directory that was provided as the output path of a job using<a name="line.969"></a>
-<span class="sourceLineNo">970</span>   *          HFileOutputFormat<a name="line.970"></a>
-<span class="sourceLineNo">971</span>   * @param silence true to ignore unmatched column families<a name="line.971"></a>
-<span class="sourceLineNo">972</span>   * @param copyFile always copy hfiles if true<a name="line.972"></a>
-<span class="sourceLineNo">973</span>   */<a name="line.973"></a>
-<span class="sourceLineNo">974</span>  private Map&lt;LoadQueueItem, ByteBuffer&gt; doBulkLoad(AsyncClusterConnection conn,<a name="line.974"></a>
-<span class="sourceLineNo">975</span>      TableName tableName, Path hfofDir, boolean silence, boolean copyFile)<a name="line.975"></a>
-<span class="sourceLineNo">976</span>      throws IOException {<a name="line.976"></a>
-<span class="sourceLineNo">977</span>    tableExists(conn, tableName);<a name="line.977"></a>
-<span class="sourceLineNo">978</span><a name="line.978"></a>
-<span class="sourceLineNo">979</span>    /*<a name="line.979"></a>
-<span class="sourceLineNo">980</span>     * Checking hfile format is a time-consuming operation, we should have an option to skip this<a name="line.980"></a>
-<span class="sourceLineNo">981</span>     * step when bulkloading millions of HFiles. See HBASE-13985.<a name="line.981"></a>
-<span class="sourceLineNo">982</span>     */<a name="line.982"></a>
-<span class="sourceLineNo">983</span>    boolean validateHFile = getConf().getBoolean(VALIDATE_HFILES, true);<a name="line.983"></a>
-<span class="sourceLineNo">984</span>    if (!validateHFile) {<a name="line.984"></a>
-<span class="sourceLineNo">985</span>      LOG.warn("You are skipping HFiles validation, it might cause some data loss if files " +<a name="line.985"></a>
-<span class="sourceLineNo">986</span>        "are not correct. If you fail to read data from your table after using this " +<a name="line.986"></a>
-<span class="sourceLineNo">987</span>        "option, consider removing the files and bulkload again without this option. " +<a name="line.987"></a>
-<span class="sourceLineNo">988</span>        "See HBASE-13985");<a name="line.988"></a>
-<span class="sourceLineNo">989</span>    }<a name="line.989"></a>
-<span class="sourceLineNo">990</span>    // LQI queue does not need to be threadsafe -- all operations on this queue<a name="line.990"></a>
-<span class="sourceLineNo">991</span>    // happen in this thread<a name="line.991"></a>
-<span class="sourceLineNo">992</span>    Deque&lt;LoadQueueItem&gt; queue = new ArrayDeque&lt;&gt;();<a name="line.992"></a>
-<span class="sourceLineNo">993</span>    ExecutorService pool = null;<a name="line.993"></a>
-<span class="sourceLineNo">994</span>    try {<a name="line.994"></a>
-<span class="sourceLineNo">995</span>      prepareHFileQueue(getConf(), conn, tableName, hfofDir, queue, validateHFile, silence);<a name="line.995"></a>
-<span class="sourceLineNo">996</span><a name="line.996"></a>
-<span class="sourceLineNo">997</span>      if (queue.isEmpty()) {<a name="line.997"></a>
-<span class="sourceLineNo">998</span>        LOG.warn(<a name="line.998"></a>
-<span class="sourceLineNo">999</span>          "Bulk load operation did not find any files to load in directory {}. " +<a name="line.999"></a>
-<span class="sourceLineNo">1000</span>          "Does it contain files in subdirectories that correspond to column family names?",<a name="line.1000"></a>
-<span class="sourceLineNo">1001</span>          (hfofDir != null ? hfofDir.toUri().toString() : ""));<a name="line.1001"></a>
-<span class="sourceLineNo">1002</span>        return Collections.emptyMap();<a name="line.1002"></a>
-<span class="sourceLineNo">1003</span>      }<a name="line.1003"></a>
-<span class="sourceLineNo">1004</span>      pool = createExecutorService();<a name="line.1004"></a>
-<span class="sourceLineNo">1005</span>      return performBulkLoad(conn, tableName, queue, pool, copyFile);<a name="line.1005"></a>
-<span class="sourceLineNo">1006</span>    } finally {<a name="line.1006"></a>
-<span class="sourceLineNo">1007</span>      cleanup(conn, tableName, queue, pool);<a name="line.1007"></a>
-<span class="sourceLineNo">1008</span>    }<a name="line.1008"></a>
-<span class="sourceLineNo">1009</span>  }<a name="line.1009"></a>
-<span class="sourceLineNo">1010</span><a name="line.1010"></a>
-<span class="sourceLineNo">1011</span>  @Override<a name="line.1011"></a>
-<span class="sourceLineNo">1012</span>  public Map&lt;LoadQueueItem, ByteBuffer&gt; bulkLoad(TableName tableName,<a name="line.1012"></a>
-<span class="sourceLineNo">1013</span>      Map&lt;byte[], List&lt;Path&gt;&gt; family2Files) throws IOException {<a name="line.1013"></a>
-<span class="sourceLineNo">1014</span>    try (AsyncClusterConnection conn = ClusterConnectionFactory.<a name="line.1014"></a>
-<span class="sourceLineNo">1015</span>        createAsyncClusterConnection(getConf(), null, userProvider.getCurrent())) {<a name="line.1015"></a>
-<span class="sourceLineNo">1016</span>      return doBulkLoad(conn, tableName, family2Files, isSilence(), isAlwaysCopyFiles());<a name="line.1016"></a>
-<span class="sourceLineNo">1017</span>    }<a name="line.1017"></a>
-<span class="sourceLineNo">1018</span>  }<a name="line.1018"></a>
-<span class="sourceLineNo">1019</span><a name="line.1019"></a>
-<span class="sourceLineNo">1020</span>  @Override<a name="line.1020"></a>
-<span class="sourceLineNo">1021</span>  public Map&lt;LoadQueueItem, ByteBuffer&gt; bulkLoad(TableName tableName, Path dir)<a name="line.1021"></a>
-<span class="sourceLineNo">1022</span>      throws IOException {<a name="line.1022"></a>
-<span class="sourceLineNo">1023</span>    try (AsyncClusterConnection conn = ClusterConnectionFactory<a name="line.1023"></a>
-<span class="sourceLineNo">1024</span>      .createAsyncClusterConnection(getConf(), null, userProvider.getCurrent())) {<a name="line.1024"></a>
-<span class="sourceLineNo">1025</span>      AsyncAdmin admin = conn.getAdmin();<a name="line.1025"></a>
-<span class="sourceLineNo">1026</span>      if (!FutureUtils.get(admin.tableExists(tableName))) {<a name="line.1026"></a>
-<span class="sourceLineNo">1027</span>        if (isCreateTable()) {<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>          createTable(tableName, dir, admin);<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>        } else {<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>          throwAndLogTableNotFoundException(tableName);<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>        }<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>      }<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>      return doBulkLoad(conn, tableName, dir, isSilence(), isAlwaysCopyFiles());<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>    }<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>  }<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span><a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>  /**<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span>   * @throws TableNotFoundException if table does not exist.<a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>   */<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>  private void tableExists(AsyncClusterConnection conn, TableName tableName) throws IOException {<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>    if (!FutureUtils.get(conn.getAdmin().tableExists(tableName))) {<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>      throwAndLogTableNotFoundException(tableName);<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>    }<a name="line.1043"></a>
+<span class="sourceLineNo">899</span>      bulkLoadPhase(conn, tableName, queue, regionGroups, copyFile, item2RegionMap);<a name="line.899"></a>
+<span class="sourceLineNo">900</span><a name="line.900"></a>
+<span class="sourceLineNo">901</span>      // NOTE: The next iteration's split / group could happen in parallel to<a name="line.901"></a>
+<span class="sourceLineNo">902</span>      // atomic bulkloads assuming that there are splits and no merges, and<a name="line.902"></a>
+<span class="sourceLineNo">903</span>      // that we can atomically pull out the groups we want to retry.<a name="line.903"></a>
+<span class="sourceLineNo">904</span>    }<a name="line.904"></a>
+<span class="sourceLineNo">905</span><a name="line.905"></a>
+<span class="sourceLineNo">906</span>    return item2RegionMap;<a name="line.906"></a>
+<span class="sourceLineNo">907</span>  }<a name="line.907"></a>
+<span class="sourceLineNo">908</span><a name="line.908"></a>
+<span class="sourceLineNo">909</span>  private void cleanup(AsyncClusterConnection conn, TableName tableName, Deque&lt;LoadQueueItem&gt; queue,<a name="line.909"></a>
+<span class="sourceLineNo">910</span>      ExecutorService pool) throws IOException {<a name="line.910"></a>
+<span class="sourceLineNo">911</span>    fsDelegationToken.releaseDelegationToken();<a name="line.911"></a>
+<span class="sourceLineNo">912</span>    if (bulkToken != null) {<a name="line.912"></a>
+<span class="sourceLineNo">913</span>      conn.cleanupBulkLoad(tableName, bulkToken);<a name="line.913"></a>
+<span class="sourceLineNo">914</span>    }<a name="line.914"></a>
+<span class="sourceLineNo">915</span>    if (pool != null) {<a name="line.915"></a>
+<span class="sourceLineNo">916</span>      pool.shutdown();<a name="line.916"></a>
+<span class="sourceLineNo">917</span>    }<a name="line.917"></a>
+<span class="sourceLineNo">918</span>    if (!queue.isEmpty()) {<a name="line.918"></a>
+<span class="sourceLineNo">919</span>      StringBuilder err = new StringBuilder();<a name="line.919"></a>
+<span class="sourceLineNo">920</span>      err.append("-------------------------------------------------\n");<a name="line.920"></a>
+<span class="sourceLineNo">921</span>      err.append("Bulk load aborted with some files not yet loaded:\n");<a name="line.921"></a>
+<span class="sourceLineNo">922</span>      err.append("-------------------------------------------------\n");<a name="line.922"></a>
+<span class="sourceLineNo">923</span>      for (LoadQueueItem q : queue) {<a name="line.923"></a>
+<span class="sourceLineNo">924</span>        err.append("  ").append(q.getFilePath()).append('\n');<a name="line.924"></a>
+<span class="sourceLineNo">925</span>      }<a name="line.925"></a>
+<span class="sourceLineNo">926</span>      LOG.error(err.toString());<a name="line.926"></a>
+<span class="sourceLineNo">927</span>    }<a name="line.927"></a>
+<span class="sourceLineNo">928</span>  }<a name="line.928"></a>
+<span class="sourceLineNo">929</span><a name="line.929"></a>
+<span class="sourceLineNo">930</span>  /**<a name="line.930"></a>
+<span class="sourceLineNo">931</span>   * Perform a bulk load of the given map of families to hfiles into the given pre-existing table.<a name="line.931"></a>
+<span class="sourceLineNo">932</span>   * This method is not threadsafe.<a name="line.932"></a>
+<span class="sourceLineNo">933</span>   * @param map map of family to List of hfiles<a name="line.933"></a>
+<span class="sourceLineNo">934</span>   * @param tableName table to load the hfiles<a name="line.934"></a>
+<span class="sourceLineNo">935</span>   * @param silence true to ignore unmatched column families<a name="line.935"></a>
+<span class="sourceLineNo">936</span>   * @param copyFile always copy hfiles if true<a name="line.936"></a>
+<span class="sourceLineNo">937</span>   */<a name="line.937"></a>
+<span class="sourceLineNo">938</span>  private Map&lt;LoadQueueItem, ByteBuffer&gt; doBulkLoad(AsyncClusterConnection conn,<a name="line.938"></a>
+<span class="sourceLineNo">939</span>      TableName tableName, Map&lt;byte[], List&lt;Path&gt;&gt; map, boolean silence, boolean copyFile)<a name="line.939"></a>
+<span class="sourceLineNo">940</span>      throws IOException {<a name="line.940"></a>
+<span class="sourceLineNo">941</span>    tableExists(conn, tableName);<a name="line.941"></a>
+<span class="sourceLineNo">942</span>    // LQI queue does not need to be threadsafe -- all operations on this queue<a name="line.942"></a>
+<span class="sourceLineNo">943</span>    // happen in this thread<a name="line.943"></a>
+<span class="sourceLineNo">944</span>    Deque&lt;LoadQueueItem&gt; queue = new ArrayDeque&lt;&gt;();<a name="line.944"></a>
+<span class="sourceLineNo">945</span>    ExecutorService pool = null;<a name="line.945"></a>
+<span class="sourceLineNo">946</span>    try {<a name="line.946"></a>
+<span class="sourceLineNo">947</span>      prepareHFileQueue(conn, tableName, map, queue, silence);<a name="line.947"></a>
+<span class="sourceLineNo">948</span>      if (queue.isEmpty()) {<a name="line.948"></a>
+<span class="sourceLineNo">949</span>        LOG.warn("Bulk load operation did not get any files to load");<a name="line.949"></a>
+<span class="sourceLineNo">950</span>        return Collections.emptyMap();<a name="line.950"></a>
+<span class="sourceLineNo">951</span>      }<a name="line.951"></a>
+<span class="sourceLineNo">952</span>      pool = createExecutorService();<a name="line.952"></a>
+<span class="sourceLineNo">953</span>      return performBulkLoad(conn, tableName, queue, pool, copyFile);<a name="line.953"></a>
+<span class="sourceLineNo">954</span>    } finally {<a name="line.954"></a>
+<span class="sourceLineNo">955</span>      cleanup(conn, tableName, queue, pool);<a name="line.955"></a>
+<span class="sourceLineNo">956</span>    }<a name="line.956"></a>
+<span class="sourceLineNo">957</span>  }<a name="line.957"></a>
+<span class="sourceLineNo">958</span><a name="line.958"></a>
+<span class="sourceLineNo">959</span>  /**<a name="line.959"></a>
+<span class="sourceLineNo">960</span>   * Perform a bulk load of the given directory into the given pre-existing table. This method is<a name="line.960"></a>
+<span class="sourceLineNo">961</span>   * not threadsafe.<a name="line.961"></a>
+<span class="sourceLineNo">962</span>   * @param tableName table to load the hfiles<a name="line.962"></a>
+<span class="sourceLineNo">963</span>   * @param hfofDir the directory that was provided as the output path of a job using<a name="line.963"></a>
+<span class="sourceLineNo">964</span>   *          HFileOutputFormat<a name="line.964"></a>
+<span class="sourceLineNo">965</span>   * @param silence true to ignore unmatched column families<a name="line.965"></a>
+<span class="sourceLineNo">966</span>   * @param copyFile always copy hfiles if true<a name="line.966"></a>
+<span class="sourceLineNo">967</span>   */<a name="line.967"></a>
+<span class="sourceLineNo">968</span>  private Map&lt;LoadQueueItem, ByteBuffer&gt; doBulkLoad(AsyncClusterConnection conn,<a name="line.968"></a>
+<span class="sourceLineNo">969</span>      TableName tableName, Path hfofDir, boolean silence, boolean copyFile)<a name="line.969"></a>
+<span class="sourceLineNo">970</span>      throws IOException {<a name="line.970"></a>
+<span class="sourceLineNo">971</span>    tableExists(conn, tableName);<a name="line.971"></a>
+<span class="sourceLineNo">972</span><a name="line.972"></a>
+<span class="sourceLineNo">973</span>    /*<a name="line.973"></a>
+<span class="sourceLineNo">974</span>     * Checking hfile format is a time-consuming operation, we should have an option to skip this<a name="line.974"></a>
+<span class="sourceLineNo">975</span>     * step when bulkloading millions of HFiles. See HBASE-13985.<a name="line.975"></a>
+<span class="sourceLineNo">976</span>     */<a name="line.976"></a>
+<span class="sourceLineNo">977</span>    boolean validateHFile = getConf().getBoolean(VALIDATE_HFILES, true);<a name="line.977"></a>
+<span class="sourceLineNo">978</span>    if (!validateHFile) {<a name="line.978"></a>
+<span class="sourceLineNo">979</span>      LOG.warn("You are skipping HFiles validation, it might cause some data loss if files " +<a name="line.979"></a>
+<span class="sourceLineNo">980</span>        "are not correct. If you fail to read data from your table after using this " +<a name="line.980"></a>
+<span class="sourceLineNo">981</span>        "option, consider removing the files and bulkload again without this option. " +<a name="line.981"></a>
+<span class="sourceLineNo">982</span>        "See HBASE-13985");<a name="line.982"></a>
+<span class="sourceLineNo">983</span>    }<a name="line.983"></a>
+<span class="sourceLineNo">984</span>    // LQI queue does not need to be threadsafe -- all operations on this queue<a name="line.984"></a>
+<span class="sourceLineNo">985</span>    // happen in this thread<a name="line.985"></a>
+<span class="sourceLineNo">986</span>    Deque&lt;LoadQueueItem&gt; queue = new ArrayDeque&lt;&gt;();<a name="line.986"></a>
+<span class="sourceLineNo">987</span>    ExecutorService pool = null;<a name="line.987"></a>
+<span class="sourceLineNo">988</span>    try {<a name="line.988"></a>
+<span class="sourceLineNo">989</span>      prepareHFileQueue(getConf(), conn, tableName, hfofDir, queue, validateHFile, silence);<a name="line.989"></a>
+<span class="sourceLineNo">990</span><a name="line.990"></a>
+<span class="sourceLineNo">991</span>      if (queue.isEmpty()) {<a name="line.991"></a>
+<span class="sourceLineNo">992</span>        LOG.warn(<a name="line.992"></a>
+<span class="sourceLineNo">993</span>          "Bulk load operation did not find any files to load in directory {}. " +<a name="line.993"></a>
+<span class="sourceLineNo">994</span>          "Does it contain files in subdirectories that correspond to column family names?",<a name="line.994"></a>
+<span class="sourceLineNo">995</span>          (hfofDir != null ? hfofDir.toUri().toString() : ""));<a name="line.995"></a>
+<span class="sourceLineNo">996</span>        return Collections.emptyMap();<a name="line.996"></a>
+<span class="sourceLineNo">997</span>      }<a name="line.997"></a>
+<span class="sourceLineNo">998</span>      pool = createExecutorService();<a name="line.998"></a>
+<span class="sourceLineNo">999</span>      return performBulkLoad(conn, tableName, queue, pool, copyFile);<a name="line.999"></a>
+<span class="sourceLineNo">1000</span>    } finally {<a name="line.1000"></a>
+<span class="sourceLineNo">1001</span>      cleanup(conn, tableName, queue, pool);<a name="line.1001"></a>
+<span class="sourceLineNo">1002</span>    }<a name="line.1002"></a>
+<span class="sourceLineNo">1003</span>  }<a name="line.1003"></a>
+<span class="sourceLineNo">1004</span><a name="line.1004"></a>
+<span class="sourceLineNo">1005</span>  @Override<a name="line.1005"></a>
+<span class="sourceLineNo">1006</span>  public Map&lt;LoadQueueItem, ByteBuffer&gt; bulkLoad(TableName tableName,<a name="line.1006"></a>
+<span class="sourceLineNo">1007</span>      Map&lt;byte[], List&lt;Path&gt;&gt; family2Files) throws IOException {<a name="line.1007"></a>
+<span class="sourceLineNo">1008</span>    try (AsyncClusterConnection conn = ClusterConnectionFactory.<a name="line.1008"></a>
+<span class="sourceLineNo">1009</span>        createAsyncClusterConnection(getConf(), null, userProvider.getCurrent())) {<a name="line.1009"></a>
+<span class="sourceLineNo">1010</span>      return doBulkLoad(conn, tableName, family2Files, isSilence(), isAlwaysCopyFiles());<a name="line.1010"></a>
+<span class="sourceLineNo">1011</span>    }<a name="line.1011"></a>
+<span class="sourceLineNo">1012</span>  }<a name="line.1012"></a>
+<span class="sourceLineNo">1013</span><a name="line.1013"></a>
+<span class="sourceLineNo">1014</span>  @Override<a name="line.1014"></a>
+<span class="sourceLineNo">1015</span>  public Map&lt;LoadQueueItem, ByteBuffer&gt; bulkLoad(TableName tableName, Path dir)<a name="line.1015"></a>
+<span class="sourceLineNo">1016</span>      throws IOException {<a name="line.1016"></a>
+<span class="sourceLineNo">1017</span>    try (AsyncClusterConnection conn = ClusterConnectionFactory<a name="line.1017"></a>
+<span class="sourceLineNo">1018</span>      .createAsyncClusterConnection(getConf(), null, userProvider.getCurrent())) {<a name="line.1018"></a>
+<span class="sourceLineNo">1019</span>      AsyncAdmin admin = conn.getAdmin();<a name="line.1019"></a>
+<span class="sourceLineNo">1020</span>      if (!FutureUtils.get(admin.tableExists(tableName))) {<a name="line.1020"></a>
+<span class="sourceLineNo">1021</span>        if (isCreateTable()) {<a name="line.1021"></a>
+<span class="sourceLineNo">1022</span>          createTable(tableName, dir, admin);<a name="line.1022"></a>
+<span class="sourceLineNo">1023</span>        } else {<a name="line.1023"></a>
+<span class="sourceLineNo">1024</span>          throwAndLogTableNotFoundException(tableName);<a name="line.1024"></a>
+<span class="sourceLineNo">1025</span>        }<a name="line.1025"></a>
+<span class="sourceLineNo">1026</span>      }<a name="line.1026"></a>
+<span class="sourceLineNo">1027</span>      return doBulkLoad(conn, tableName, dir, isSilence(), isAlwaysCopyFiles());<a name="line.1027"></a>
+<span class="sourceLineNo">1028</span>    }<a name="line.1028"></a>
+<span class="sourceLineNo">1029</span>  }<a name="line.1029"></a>
+<span class="sourceLineNo">1030</span><a name="line.1030"></a>
+<span class="sourceLineNo">1031</span>  /**<a name="line.1031"></a>
+<span class="sourceLineNo">1032</span>   * @throws TableNotFoundException if table does not exist.<a name="line.1032"></a>
+<span class="sourceLineNo">1033</span>   */<a name="line.1033"></a>
+<span class="sourceLineNo">1034</span>  private void tableExists(AsyncClusterConnection conn, TableName tableName) throws IOException {<a name="line.1034"></a>
+<span class="sourceLineNo">1035</span>    if (!FutureUtils.get(conn.getAdmin().tableExists(tableName))) {<a name="line.1035"></a>
+<span class="sourceLineNo">1036</span>      throwAndLogTableNotFoundException(tableName);<a name="line.1036"></a>
+<span class="sourceLineNo">1037</span>    }<a name="line.1037"></a>
+<span class="sourceLineNo">1038</span>  }<a name="line.1038"></a>
+<span class="sourceLineNo">1039</span><a name="line.1039"></a>
+<span class="sourceLineNo">1040</span>  private void throwAndLogTableNotFoundException(TableName tn) throws TableNotFoundException {<a name="line.1040"></a>
+<span class="sourceLineNo">1041</span>    String errorMsg = format("Table '%s' does not exist.", tn);<a name="line.1041"></a>
+<span class="sourceLineNo">1042</span>    LOG.error(errorMsg);<a name="line.1042"></a>
+<span class="sourceLineNo">1043</span>    throw new TableNotFoundException(errorMsg);<a name="line.1043"></a>
 <span class="sourceLineNo">1044</span>  }<a name="line.1044"></a>
 <span class="sourceLineNo">1045</span><a name="line.1045"></a>
-<span class="sourceLineNo">1046</span>  private void throwAndLogTableNotFoundException(TableName tn) throws TableNotFoundException {<a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>    String errorMsg = format("Table '%s' does not exist.", tn);<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>    LOG.error(errorMsg);<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>    throw new TableNotFoundException(errorMsg);<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>  }<a name="line.1050"></a>
-<span class="sourceLineNo">1051</span><a name="line.1051"></a>
-<span class="sourceLineNo">1052</span>  public void setBulkToken(String bulkToken) {<a name="line.1052"></a>
-<span class="sourceLineNo">1053</span>    this.bulkToken = bulkToken;<a name="line.1053"></a>
-<span class="sourceLineNo">1054</span>  }<a name="line.1054"></a>
-<span class="sourceLineNo">1055</span><a name="line.1055"></a>
-<span class="sourceLineNo">1056</span>  public void setClusterIds(List&lt;String&gt; clusterIds) {<a name="line.1056"></a>
-<span class="sourceLineNo">1057</span>    this.clusterIds = clusterIds;<a name="line.1057"></a>
-<span class="sourceLineNo">1058</span>  }<a name="line.1058"></a>
-<span class="sourceLineNo">1059</span><a name="line.1059"></a>
-<span class="sourceLineNo">1060</span>  private void usage() {<a name="line.1060"></a>
-<span class="sourceLineNo">1061</span>    System.err.println("Usage: " + "bin/hbase completebulkload [OPTIONS] "<a name="line.1061"></a>
-<span class="sourceLineNo">1062</span>        + "&lt;/PATH/TO/HFILEOUTPUTFORMAT-OUTPUT&gt; &lt;TABLENAME&gt;\n"<a name="line.1062"></a>
-<span class="sourceLineNo">1063</span>        + "Loads directory of hfiles -- a region dir or product of HFileOutputFormat -- "<a name="line.1063"></a>
-<span class="sourceLineNo">1064</span>        + "into an hbase table.\n"<a name="line.1064"></a>
-<span class="sourceLineNo">1065</span>        + "OPTIONS (for other -D options, see source code):\n"<a name="line.1065"></a>
-<span class="sourceLineNo">1066</span>        + " -D" + CREATE_TABLE_CONF_KEY + "=no whether to create table; when 'no', target "<a name="line.1066"></a>
-<span class="sourceLineNo">1067</span>        + "table must exist.\n"<a name="line.1067"></a>
-<span class="sourceLineNo">1068</span>        + " -D" + IGNORE_UNMATCHED_CF_CONF_KEY + "=yes to ignore unmatched column families.\n"<a name="line.1068"></a>
-<span class="sourceLineNo">1069</span>        + " -loadTable for when directory of files to load has a depth of 3; target table must "<a name="line.1069"></a>
-<span class="sourceLineNo">1070</span>        + "exist;\n"<a name="line.1070"></a>
-<span class="sourceLineNo">1071</span>        + " must be last of the options on command line.\n"<a name="line.1071"></a>
-<span class="sourceLineNo">1072</span>        + "See http://hbase.apache.org/book.html#arch.bulk.load.complete.strays for "<a name="line.1072"></a>
-<span class="sourceLineNo">1073</span>        + "documentation.\n");<a name="line.1073"></a>
-<span class="sourceLineNo">1074</span>  }<a name="line.1074"></a>
-<span class="sourceLineNo">1075</span><a name="line.1075"></a>
-<span class="sourceLineNo">1076</span>  @Override<a name="line.1076"></a>
-<span class="sourceLineNo">1077</span>  public int run(String[] args) throws Exception {<a name="line.1077"></a>
-<span class="sourceLineNo">1078</span>    if (args.length != 2 &amp;&amp; args.length != 3) {<a name="line.1078"></a>
-<span class="sourceLineNo">1079</span>      usage();<a name="line.1079"></a>
-<span class="sourceLineNo">1080</span>      return -1;<a name="line.1080"></a>
-<span class="sourceLineNo">1081</span>    }<a name="line.1081"></a>
-<span class="sourceLineNo">1082</span>    Path dirPath = new Path(args[0]);<a name="line.1082"></a>
-<span class="sourceLineNo">1083</span>    TableName tableName = TableName.valueOf(args[1]);<a name="line.1083"></a>
-<span class="sourceLineNo">1084</span>    if (args.length == 2) {<a name="line.1084"></a>
-<span class="sourceLineNo">1085</span>      return !bulkLoad(tableName, dirPath).isEmpty() ? 0 : -1;<a name="line.1085"></a>
-<span class="sourceLineNo">1086</span>    } else {<a name="line.1086"></a>
-<span class="sourceLineNo">1087</span>      Map&lt;byte[], List&lt;Path&gt;&gt; family2Files = Maps.newHashMap();<a name="line.1087"></a>
-<span class="sourceLineNo">1088</span>      FileSystem fs = FileSystem.get(getConf());<a name="line.1088"></a>
-<span class="sourceLineNo">1089</span>      for (FileStatus regionDir : fs.listStatus(dirPath)) {<a name="line.1089"></a>
-<span class="sourceLineNo">1090</span>        FSVisitor.visitRegionStoreFiles(fs, regionDir.getPath(), (region, family, hfileName) -&gt; {<a name="line.1090"></a>
-<span class="sourceLineNo">1091</span>          Path path = new Path(regionDir.getPath(), new Path(family, hfileName));<a name="line.1091"></a>
-<span class="sourceLineNo">1092</span>          byte[] familyName = Bytes.toBytes(family);<a name="line.1092"></a>
-<span class="sourceLineNo">1093</span>          if (family2Files.containsKey(familyName)) {<a name="line.1093"></a>
-<span class="sourceLineNo">1094</span>            family2Files.get(familyName).add(path);<a name="line.1094"></a>
-<span class="sourceLineNo">1095</span>          } else {<a name="line.1095"></a>
-<span class="sourceLineNo">1096</span>            family2Files.put(familyName, Lists.newArrayList(path));<a name="line.1096"></a>
-<span class="sourceLineNo">1097</span>          }<a name="line.1097"></a>
-<span class="sourceLineNo">1098</span>        });<a name="line.1098"></a>
-<span class="sourceLineNo">1099</span>      }<a name="line.1099"></a>
-<span class="sourceLineNo">1100</span>      return !bulkLoad(tableName, family2Files).isEmpty() ? 0 : -1;<a name="line.1100"></a>
-<span class="sourceLineNo">1101</span>    }<a name="line.1101"></a>
+<span class="sourceLineNo">1046</span>  public void setBulkToken(String bulkToken) {<a name="line.1046"></a>
+<span class="sourceLineNo">1047</span>    this.bulkToken = bulkToken;<a name="line.1047"></a>
+<span class="sourceLineNo">1048</span>  }<a name="line.1048"></a>
+<span class="sourceLineNo">1049</span><a name="line.1049"></a>
+<span class="sourceLineNo">1050</span>  public void setClusterIds(List&lt;String&gt; clusterIds) {<a name="line.1050"></a>
+<span class="sourceLineNo">1051</span>    this.clusterIds = clusterIds;<a name="line.1051"></a>
+<span class="sourceLineNo">1052</span>  }<a name="line.1052"></a>
+<span class="sourceLineNo">1053</span><a name="line.1053"></a>
+<span class="sourceLineNo">1054</span>  private void usage() {<a name="line.1054"></a>
+<span class="sourceLineNo">1055</span>    System.err.println("Usage: " + "bin/hbase completebulkload [OPTIONS] "<a name="line.1055"></a>
+<span class="sourceLineNo">1056</span>        + "&lt;/PATH/TO/HFILEOUTPUTFORMAT-OUTPUT&gt; &lt;TABLENAME&gt;\n"<a name="line.1056"></a>
+<span class="sourceLineNo">1057</span>        + "Loads directory of hfiles -- a region dir or product of HFileOutputFormat -- "<a name="line.1057"></a>
+<span class="sourceLineNo">1058</span>        + "into an hbase table.\n"<a name="line.1058"></a>
+<span class="sourceLineNo">1059</span>        + "OPTIONS (for other -D options, see source code):\n"<a name="line.1059"></a>
+<span class="sourceLineNo">1060</span>        + " -D" + CREATE_TABLE_CONF_KEY + "=no whether to create table; when 'no', target "<a name="line.1060"></a>
+<span class="sourceLineNo">1061</span>        + "table must exist.\n"<a name="line.1061"></a>
+<span class="sourceLineNo">1062</span>        + " -D" + IGNORE_UNMATCHED_CF_CONF_KEY + "=yes to ignore unmatched column families.\n"<a name="line.1062"></a>
+<span class="sourceLineNo">1063</span>        + " -loadTable for when directory of files to load has a depth of 3; target table must "<a name="line.1063"></a>
+<span class="sourceLineNo">1064</span>        + "exist;\n"<a name="line.1064"></a>
+<span class="sourceLineNo">1065</span>        + " must be last of the options on command line.\n"<a name="line.1065"></a>
+<span class="sourceLineNo">1066</span>        + "See http://hbase.apache.org/book.html#arch.bulk.load.complete.strays for "<a name="line.1066"></a>
+<span class="sourceLineNo">1067</span>        + "documentation.\n");<a name="line.1067"></a>
+<span class="sourceLineNo">1068</span>  }<a name="line.1068"></a>
+<span class="sourceLineNo">1069</span><a name="line.1069"></a>
+<span class="sourceLineNo">1070</span>  @Override<a name="line.1070"></a>
+<span class="sourceLineNo">1071</span>  public int run(String[] args) throws Exception {<a name="line.1071"></a>
+<span class="sourceLineNo">1072</span>    if (args.length != 2 &amp;&amp; args.length != 3) {<a name="line.1072"></a>
+<span class="sourceLineNo">1073</span>      usage();<a name="line.1073"></a>
+<span class="sourceLineNo">1074</span>      return -1;<a name="line.1074"></a>
+<span class="sourceLineNo">1075</span>    }<a name="line.1075"></a>
+<span class="sourceLineNo">1076</span>    Path dirPath = new Path(args[0]);<a name="line.1076"></a>
+<span class="sourceLineNo">1077</span>    TableName tableName = TableName.valueOf(args[1]);<a name="line.1077"></a>
+<span class="sourceLineNo">1078</span>    if (args.length == 2) {<a name="line.1078"></a>
+<span class="sourceLineNo">1079</span>      return !bulkLoad(tableName, dirPath).isEmpty() ? 0 : -1;<a name="line.1079"></a>
+<span class="sourceLineNo">1080</span>    } else {<a name="line.1080"></a>
+<span class="sourceLineNo">1081</span>      Map&lt;byte[], List&lt;Path&gt;&gt; family2Files = Maps.newHashMap();<a name="line.1081"></a>
+<span class="sourceLineNo">1082</span>      FileSystem fs = FileSystem.get(getConf());<a name="line.1082"></a>
+<span class="sourceLineNo">1083</span>      for (FileStatus regionDir : fs.listStatus(dirPath)) {<a name="line.1083"></a>
+<span class="sourceLineNo">1084</span>        FSVisitor.visitRegionStoreFiles(fs, regionDir.getPath(), (region, family, hfileName) -&gt; {<a name="line.1084"></a>
+<span class="sourceLineNo">1085</span>          Path path = new Path(regionDir.getPath(), new Path(family, hfileName));<a name="line.1085"></a>
+<span class="sourceLineNo">1086</span>          byte[] familyName = Bytes.toBytes(family);<a name="line.1086"></a>
+<span class="sourceLineNo">1087</span>          if (family2Files.containsKey(familyName)) {<a name="line.1087"></a>
+<span class="sourceLineNo">1088</span>            family2Files.get(familyName).add(path);<a name="line.1088"></a>
+<span class="sourceLineNo">1089</span>          } else {<a name="line.1089"></a>
+<span class="sourceLineNo">1090</span>            family2Files.put(familyName, Lists.newArrayList(path));<a name="line.1090"></a>
+<span class="sourceLineNo">1091</span>          }<a name="line.1091"></a>
+<span class="sourceLineNo">1092</span>        });<a name="line.1092"></a>
+<span class="sourceLineNo">1093</span>      }<a name="line.1093"></a>
+<span class="sourceLineNo">1094</span>      return !bulkLoad(tableName, family2Files).isEmpty() ? 0 : -1;<a name="line.1094"></a>
+<span class="sourceLineNo">1095</span>    }<a name="line.1095"></a>
+<span class="sourceLineNo">1096</span>  }<a name="line.1096"></a>
+<span class="sourceLineNo">1097</span><a name="line.1097"></a>
+<span class="sourceLineNo">1098</span>  public static void main(String[] args) throws Exception {<a name="line.1098"></a>
+<span class="sourceLineNo">1099</span>    Configuration conf = HBaseConfiguration.create();<a name="line.1099"></a>
+<span class="sourceLineNo">1100</span>    int ret = ToolRunner.run(conf, new BulkLoadHFilesTool(conf), args);<a name="line.1100"></a>
+<span class="sourceLineNo">1101</span>    System.exit(ret);<a name="line.1101"></a>
 <span class="sourceLineNo">1102</span>  }<a name="line.1102"></a>
 <span class="sourceLineNo">1103</span><a name="line.1103"></a>
-<span class="sourceLineNo">1104</span>  public static void main(String[] args) throws Exception {<a name="line.1104"></a>
-<span class="sourceLineNo">1105</span>    Configuration conf = HBaseConfiguration.create();<a name="line.1105"></a>
-<span class="sourceLineNo">1106</span>    int ret = ToolRunner.run(conf, new BulkLoadHFilesTool(conf), args);<a name="line.1106"></a>
-<span class="sourceLineNo">1107</span>    System.exit(ret);<a name="line.1107"></a>
-<span class="sourceLineNo">1108</span>  }<a name="line.1108"></a>
-<span class="sourceLineNo">1109</span><a name="line.1109"></a>
-<span class="sourceLineNo">1110</span>  @Override<a name="line.1110"></a>
-<span class="sourceLineNo">1111</span>  public void disableReplication(){<a name="line.1111"></a>
-<span class="sourceLineNo">1112</span>    this.replicate = false;<a name="line.1112"></a>
-<span class="sourceLineNo">1113</span>  }<a name="line.1113"></a>
-<span class="sourceLineNo">1114</span><a name="line.1114"></a>
-<span class="sourceLineNo">1115</span>  @Override<a name="line.1115"></a>
-<span class="sourceLineNo">1116</span>  public boolean isReplicationDisabled(){<a name="line.1116"></a>
-<span class="sourceLineNo">1117</span>    return !this.replicate;<a name="line.1117"></a>
-<span class="sourceLineNo">1118</span>  }<a name="line.1118"></a>
-<span class="sourceLineNo">1119</span>}<a name="line.1119"></a>
+<span class="sourceLineNo">1104</span>  @Override<a name="line.1104"></a>
+<span class="sourceLineNo">1105</span>  public void disableReplication(){<a name="line.1105"></a>
+<span class="sourceLineNo">1106</span>    this.replicate = false;<a name="line.1106"></a>
+<span class="sourceLineNo">1107</span>  }<a name="line.1107"></a>
+<span class="sourceLineNo">1108</span><a name="line.1108"></a>
+<span class="sourceLineNo">1109</span>  @Override<a name="line.1109"></a>
+<span class="sourceLineNo">1110</span>  public boolean isReplicationDisabled(){<a name="line.1110"></a>
+<span class="sourceLineNo">1111</span>    return !this.replicate;<a name="line.1111"></a>
+<span class="sourceLineNo">1112</span>  }<a name="line.1112"></a>
+<span class="sourceLineNo">1113</span>}<a name="line.1113"></a>
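For context on the BulkLoadHFilesTool source shown in the hunk above: the tool is driven through Hadoop's ToolRunner exactly as its main() does, and run() accepts either two arguments (HFile directory, table name) or three, where the third switches it into the branch that builds the family-to-files map itself. The sketch below is a minimal, hypothetical driver that simply mirrors that entry point; the class name BulkLoadDriver and the comments are illustrative only, and it assumes BulkLoadHFilesTool is importable from org.apache.hadoop.hbase.tool with the HBase client/server jars on the classpath.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.tool.BulkLoadHFilesTool;
    import org.apache.hadoop.util.ToolRunner;

    public class BulkLoadDriver {
      public static void main(String[] args) throws Exception {
        // Same wiring as the main() shown in the diff above: create an HBase
        // configuration and hand the argument list to ToolRunner, which invokes
        // BulkLoadHFilesTool.run(String[]).
        Configuration conf = HBaseConfiguration.create();
        // args[0] = directory containing the HFiles, args[1] = target table name;
        // an optional third argument makes run() list region store files and group
        // them per column family before bulk loading.
        int exitCode = ToolRunner.run(conf, new BulkLoadHFilesTool(conf), args);
        System.exit(exitCode);
      }
    }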
 
 
 
diff --git a/downloads.html b/downloads.html
index 40fa9c6..47fd30a 100644
--- a/downloads.html
+++ b/downloads.html
@@ -434,7 +434,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/export_control.html b/export_control.html
index a93418c..875c50b 100644
--- a/export_control.html
+++ b/export_control.html
@@ -197,7 +197,7 @@ for more details.</p>
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/index.html b/index.html
index 0f2d745..5572f9e 100644
--- a/index.html
+++ b/index.html
@@ -275,7 +275,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/issue-tracking.html b/issue-tracking.html
index 62c8374..831b67e 100644
--- a/issue-tracking.html
+++ b/issue-tracking.html
@@ -169,7 +169,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/mail-lists.html b/mail-lists.html
index af956b5..356d02a 100644
--- a/mail-lists.html
+++ b/mail-lists.html
@@ -229,7 +229,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/metrics.html b/metrics.html
index 7328142..2ba5a83 100644
--- a/metrics.html
+++ b/metrics.html
@@ -325,7 +325,7 @@ export HBASE_REGIONSERVER_OPTS=&quot;$HBASE_JMX_OPTS -Dcom.sun.management.jmxrem
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/old_news.html b/old_news.html
index 9d4c03f..7874b7b 100644
--- a/old_news.html
+++ b/old_news.html
@@ -316,7 +316,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/plugin-management.html b/plugin-management.html
index cbfe581..115dbc86 100644
--- a/plugin-management.html
+++ b/plugin-management.html
@@ -321,7 +321,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/plugins.html b/plugins.html
index bec32d3..abd5a9e 100644
--- a/plugins.html
+++ b/plugins.html
@@ -248,7 +248,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/poweredbyhbase.html b/poweredbyhbase.html
index 1729207..b028924 100644
--- a/poweredbyhbase.html
+++ b/poweredbyhbase.html
@@ -650,7 +650,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/project-info.html b/project-info.html
index fe6e108..2eaaea6 100644
--- a/project-info.html
+++ b/project-info.html
@@ -210,7 +210,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/project-reports.html b/project-reports.html
index 1b58573..2eafd85 100644
--- a/project-reports.html
+++ b/project-reports.html
@@ -186,7 +186,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/project-summary.html b/project-summary.html
index f954bd5..b703701 100644
--- a/project-summary.html
+++ b/project-summary.html
@@ -212,7 +212,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/pseudo-distributed.html b/pseudo-distributed.html
index 81e3505..62594da 100644
--- a/pseudo-distributed.html
+++ b/pseudo-distributed.html
@@ -174,7 +174,7 @@ Running Apache HBase (TM) in pseudo-distributed mode
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/replication.html b/replication.html
index 087e363..e8fbcae 100644
--- a/replication.html
+++ b/replication.html
@@ -169,7 +169,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/resources.html b/resources.html
index 9f84d40..11f8dc1 100644
--- a/resources.html
+++ b/resources.html
@@ -197,7 +197,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/source-repository.html b/source-repository.html
index adfab1d..9c7070d 100644
--- a/source-repository.html
+++ b/source-repository.html
@@ -180,7 +180,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/sponsors.html b/sponsors.html
index 4bd48be..f58287e 100644
--- a/sponsors.html
+++ b/sponsors.html
@@ -199,7 +199,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/supportingprojects.html b/supportingprojects.html
index ff9b2cb..cd770fb 100644
--- a/supportingprojects.html
+++ b/supportingprojects.html
@@ -390,7 +390,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/team-list.html b/team-list.html
index 098ebfc..ec61c0f 100644
--- a/team-list.html
+++ b/team-list.html
@@ -701,7 +701,7 @@
         <div class="row">
             <p>Copyright &copy;2007&#x2013;2020
 <a href="https://www.apache.org/">The Apache Software Foundation</a>.
-All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-21</li>
+All rights reserved.        <li id="publishDate" class="pull-right">Last Published: 2020-06-22</li>
 </p>
         </div>
         <p id="poweredBy" class="pull-right"><a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy"><img class="builtBy" alt="Built by Maven" src="./images/logos/maven-feather.png" /></a>
diff --git a/testdevapidocs/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html b/testdevapidocs/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html
index a30f531..c360bd8 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 <li class="blockList">
 <hr>
 <br>
-<pre>public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.47">TestFsDelegationToken</a>
+<pre>public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.46">TestFsDelegationToken</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -257,7 +257,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>userProvider</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.security.UserProvider <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.48">userProvider</a></pre>
+<pre>private&nbsp;org.apache.hadoop.hbase.security.UserProvider <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.47">userProvider</a></pre>
 </li>
 </ul>
 <a name="user">
@@ -266,7 +266,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>user</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.security.User <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.49">user</a></pre>
+<pre>private&nbsp;org.apache.hadoop.hbase.security.User <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.48">user</a></pre>
 </li>
 </ul>
 <a name="fsDelegationToken">
@@ -275,7 +275,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>fsDelegationToken</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.security.token.FsDelegationToken <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.50">fsDelegationToken</a></pre>
+<pre>private&nbsp;org.apache.hadoop.hbase.security.token.FsDelegationToken <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.49">fsDelegationToken</a></pre>
 </li>
 </ul>
 <a name="hdfsToken">
@@ -284,7 +284,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>hdfsToken</h4>
-<pre>private&nbsp;org.apache.hadoop.security.token.Token <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.51">hdfsToken</a></pre>
+<pre>private&nbsp;org.apache.hadoop.security.token.Token <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.50">hdfsToken</a></pre>
 </li>
 </ul>
 <a name="webhdfsToken">
@@ -293,7 +293,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>webhdfsToken</h4>
-<pre>private&nbsp;org.apache.hadoop.security.token.Token <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.52">webhdfsToken</a></pre>
+<pre>private&nbsp;org.apache.hadoop.security.token.Token <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.51">webhdfsToken</a></pre>
 </li>
 </ul>
 <a name="swebhdfsToken">
@@ -302,7 +302,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>swebhdfsToken</h4>
-<pre>private&nbsp;org.apache.hadoop.security.token.Token <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.53">swebhdfsToken</a></pre>
+<pre>private&nbsp;org.apache.hadoop.security.token.Token <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.52">swebhdfsToken</a></pre>
 </li>
 </ul>
 <a name="webHdfsFileSystem">
@@ -311,7 +311,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>webHdfsFileSystem</h4>
-<pre>private&nbsp;org.apache.hadoop.hdfs.web.WebHdfsFileSystem <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.54">webHdfsFileSystem</a></pre>
+<pre>private&nbsp;org.apache.hadoop.hdfs.web.WebHdfsFileSystem <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.53">webHdfsFileSystem</a></pre>
 </li>
 </ul>
 <a name="swebHdfsFileSystem">
@@ -320,7 +320,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>swebHdfsFileSystem</h4>
-<pre>private&nbsp;org.apache.hadoop.hdfs.web.WebHdfsFileSystem <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.55">swebHdfsFileSystem</a></pre>
+<pre>private&nbsp;org.apache.hadoop.hdfs.web.WebHdfsFileSystem <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.54">swebHdfsFileSystem</a></pre>
 </li>
 </ul>
 <a name="fileSystem">
@@ -329,7 +329,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>fileSystem</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.56">fileSystem</a></pre>
+<pre>private&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.55">fileSystem</a></pre>
 </li>
 </ul>
 <a name="CLASS_RULE">
@@ -338,7 +338,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CLASS_RULE</h4>
-<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.59">CLASS_RULE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.58">CLASS_RULE</a></pre>
 </li>
 </ul>
 </li>
@@ -355,7 +355,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TestFsDelegationToken</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.47">TestFsDelegationToken</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.46">TestFsDelegationToken</a>()</pre>
 </li>
 </ul>
 </li>
@@ -372,7 +372,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setup</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.63">setup</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.62">setup</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                   <a href="https://docs.oracle.com/javase/8/docs/api/java/net/URISyntaxException.html?is-external=true" title="class or interface in java.net">URISyntaxException</a></pre>
 <dl>
@@ -388,7 +388,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>acquireDelegationToken_defaults_to_hdfsFileSystem</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.91">acquireDelegationToken_defaults_to_hdfsFileSystem</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.87">acquireDelegationToken_defaults_to_hdfsFileSystem</a>()
                                                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -402,7 +402,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>acquireDelegationToken_webhdfsFileSystem</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.102">acquireDelegationToken_webhdfsFileSystem</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.94">acquireDelegationToken_webhdfsFileSystem</a>()
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -416,7 +416,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>acquireDelegationToken_swebhdfsFileSystem</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.113">acquireDelegationToken_swebhdfsFileSystem</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.101">acquireDelegationToken_swebhdfsFileSystem</a>()
                                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -430,7 +430,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>acquireDelegationTokenByTokenKind_rejects_null_token_kind</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.124">acquireDelegationTokenByTokenKind_rejects_null_token_kind</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.108">acquireDelegationTokenByTokenKind_rejects_null_token_kind</a>()
                                                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -444,7 +444,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>acquireDelegationTokenByTokenKind_webhdfsFileSystem</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.129">acquireDelegationTokenByTokenKind_webhdfsFileSystem</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.113">acquireDelegationTokenByTokenKind_webhdfsFileSystem</a>()
                                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -458,7 +458,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>acquireDelegationTokenByTokenKind_swebhdfsFileSystem</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.140">acquireDelegationTokenByTokenKind_swebhdfsFileSystem</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html#line.120">acquireDelegationTokenByTokenKind_swebhdfsFileSystem</a>()
                                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html
index c73e51b..bcb30ba 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/security/token/TestFsDelegationToken.html
@@ -29,133 +29,109 @@
 <span class="sourceLineNo">021</span>import static org.apache.hadoop.hdfs.web.WebHdfsConstants.SWEBHDFS_TOKEN_KIND;<a name="line.21"></a>
 <span class="sourceLineNo">022</span>import static org.apache.hadoop.hdfs.web.WebHdfsConstants.WEBHDFS_TOKEN_KIND;<a name="line.22"></a>
 <span class="sourceLineNo">023</span>import static org.junit.Assert.assertEquals;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import static org.junit.Assert.assertNotNull;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import static org.mockito.Mockito.when;<a name="line.25"></a>
-<span class="sourceLineNo">026</span><a name="line.26"></a>
-<span class="sourceLineNo">027</span>import java.io.IOException;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import java.net.URI;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import java.net.URISyntaxException;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.fs.FileSystem;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.security.User;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.security.UserProvider;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.testclassification.SecurityTests;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.testclassification.SmallTests;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.io.Text;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.security.token.Token;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.junit.Before;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.junit.ClassRule;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.junit.Test;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.junit.experimental.categories.Category;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.mockito.Mockito;<a name="line.44"></a>
-<span class="sourceLineNo">045</span><a name="line.45"></a>
-<span class="sourceLineNo">046</span>@Category({SecurityTests.class, SmallTests.class})<a name="line.46"></a>
-<span class="sourceLineNo">047</span>public class TestFsDelegationToken {<a name="line.47"></a>
-<span class="sourceLineNo">048</span>  private UserProvider userProvider = Mockito.mock(UserProvider.class);<a name="line.48"></a>
-<span class="sourceLineNo">049</span>  private User user = Mockito.mock(User.class);<a name="line.49"></a>
-<span class="sourceLineNo">050</span>  private FsDelegationToken fsDelegationToken = new FsDelegationToken(userProvider, "Renewer");<a name="line.50"></a>
-<span class="sourceLineNo">051</span>  private Token hdfsToken = Mockito.mock(Token.class);<a name="line.51"></a>
-<span class="sourceLineNo">052</span>  private Token webhdfsToken = Mockito.mock(Token.class);<a name="line.52"></a>
-<span class="sourceLineNo">053</span>  private Token swebhdfsToken = Mockito.mock(Token.class);<a name="line.53"></a>
-<span class="sourceLineNo">054</span>  private WebHdfsFileSystem webHdfsFileSystem = Mockito.mock(WebHdfsFileSystem.class);<a name="line.54"></a>
-<span class="sourceLineNo">055</span>  private WebHdfsFileSystem swebHdfsFileSystem = Mockito.mock(SWebHdfsFileSystem.class);<a name="line.55"></a>
-<span class="sourceLineNo">056</span>  private FileSystem fileSystem = Mockito.mock(FileSystem.class);<a name="line.56"></a>
-<span class="sourceLineNo">057</span><a name="line.57"></a>
-<span class="sourceLineNo">058</span>  @ClassRule<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.59"></a>
-<span class="sourceLineNo">060</span>      HBaseClassTestRule.forClass(TestFsDelegationToken.class);<a name="line.60"></a>
-<span class="sourceLineNo">061</span><a name="line.61"></a>
-<span class="sourceLineNo">062</span>  @Before<a name="line.62"></a>
-<span class="sourceLineNo">063</span>  public void setup() throws IOException, URISyntaxException {<a name="line.63"></a>
-<span class="sourceLineNo">064</span>    when(userProvider.getCurrent()).thenReturn(user);<a name="line.64"></a>
-<span class="sourceLineNo">065</span>    when(userProvider.isHadoopSecurityEnabled()).thenReturn(true);<a name="line.65"></a>
-<span class="sourceLineNo">066</span>    when(fileSystem.getCanonicalServiceName()).thenReturn("hdfs://");<a name="line.66"></a>
-<span class="sourceLineNo">067</span>    when(fileSystem.getUri()).thenReturn(new URI("hdfs://someUri"));<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    when(webHdfsFileSystem.getCanonicalServiceName()).thenReturn("webhdfs://");<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    when(webHdfsFileSystem.getUri()).thenReturn(new URI("webhdfs://someUri"));<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    when(swebHdfsFileSystem.getCanonicalServiceName()).thenReturn("swebhdfs://");<a name="line.70"></a>
-<span class="sourceLineNo">071</span>    when(swebHdfsFileSystem.getUri()).thenReturn(new URI("swebhdfs://someUri"));<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    when(user.getToken(<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        HDFS_DELEGATION_KIND.toString(),<a name="line.73"></a>
-<span class="sourceLineNo">074</span>        fileSystem.getCanonicalServiceName()))<a name="line.74"></a>
-<span class="sourceLineNo">075</span>        .thenReturn(hdfsToken);<a name="line.75"></a>
-<span class="sourceLineNo">076</span>    when(user.getToken(<a name="line.76"></a>
-<span class="sourceLineNo">077</span>        WEBHDFS_TOKEN_KIND.toString(),<a name="line.77"></a>
-<span class="sourceLineNo">078</span>        webHdfsFileSystem.getCanonicalServiceName())).thenReturn(webhdfsToken);<a name="line.78"></a>
-<span class="sourceLineNo">079</span>    when(user.getToken(<a name="line.79"></a>
-<span class="sourceLineNo">080</span>        SWEBHDFS_TOKEN_KIND.toString(),<a name="line.80"></a>
-<span class="sourceLineNo">081</span>        swebHdfsFileSystem.getCanonicalServiceName())).thenReturn(swebhdfsToken);<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    when(hdfsToken.getKind()).thenReturn(new Text("HDFS_DELEGATION_TOKEN"));<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    when(webhdfsToken.getKind()).thenReturn(WEBHDFS_TOKEN_KIND);<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    when(swebhdfsToken.getKind()).thenReturn(SWEBHDFS_TOKEN_KIND);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>    when(fileSystem.getDelegationToken("Renewer")).thenReturn(hdfsToken);<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    when(webHdfsFileSystem.getDelegationToken("Renewer")).thenReturn(webhdfsToken);<a name="line.86"></a>
-<span class="sourceLineNo">087</span>    when(swebHdfsFileSystem.getDelegationToken("Renewer")).thenReturn(swebhdfsToken);<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  }<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>  @Test<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  public void acquireDelegationToken_defaults_to_hdfsFileSystem() throws IOException {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    fsDelegationToken.acquireDelegationToken(fileSystem);<a name="line.92"></a>
-<span class="sourceLineNo">093</span>    assertEquals(<a name="line.93"></a>
-<span class="sourceLineNo">094</span>        fsDelegationToken.getUserToken().getKind(), HDFS_DELEGATION_KIND);<a name="line.94"></a>
-<span class="sourceLineNo">095</span>    assertNotNull(<a name="line.95"></a>
-<span class="sourceLineNo">096</span>      "HDFS Token should exist in cache after acquired",<a name="line.96"></a>
-<span class="sourceLineNo">097</span>      userProvider.getCurrent()<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        .getToken(HDFS_DELEGATION_KIND.toString(), fileSystem.getCanonicalServiceName()));<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  }<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>  @Test<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  public void acquireDelegationToken_webhdfsFileSystem() throws IOException {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    fsDelegationToken.acquireDelegationToken(webHdfsFileSystem);<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    assertEquals(<a name="line.104"></a>
-<span class="sourceLineNo">105</span>        fsDelegationToken.getUserToken().getKind(), WEBHDFS_TOKEN_KIND);<a name="line.105"></a>
-<span class="sourceLineNo">106</span>    assertNotNull(<a name="line.106"></a>
-<span class="sourceLineNo">107</span>      "Webhdfs token should exist in cache after acquired",<a name="line.107"></a>
-<span class="sourceLineNo">108</span>      userProvider.getCurrent()<a name="line.108"></a>
-<span class="sourceLineNo">109</span>        .getToken(WEBHDFS_TOKEN_KIND.toString(), webHdfsFileSystem.getCanonicalServiceName()));<a name="line.109"></a>
+<span class="sourceLineNo">024</span>import static org.mockito.Mockito.when;<a name="line.24"></a>
+<span class="sourceLineNo">025</span><a name="line.25"></a>
+<span class="sourceLineNo">026</span>import java.io.IOException;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import java.net.URI;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import java.net.URISyntaxException;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.fs.FileSystem;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.security.User;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.security.UserProvider;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.testclassification.SecurityTests;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.testclassification.SmallTests;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.io.Text;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.security.token.Token;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.junit.Before;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.junit.ClassRule;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.junit.Test;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.junit.experimental.categories.Category;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.mockito.Mockito;<a name="line.43"></a>
+<span class="sourceLineNo">044</span><a name="line.44"></a>
+<span class="sourceLineNo">045</span>@Category({SecurityTests.class, SmallTests.class})<a name="line.45"></a>
+<span class="sourceLineNo">046</span>public class TestFsDelegationToken {<a name="line.46"></a>
+<span class="sourceLineNo">047</span>  private UserProvider userProvider = Mockito.mock(UserProvider.class);<a name="line.47"></a>
+<span class="sourceLineNo">048</span>  private User user = Mockito.mock(User.class);<a name="line.48"></a>
+<span class="sourceLineNo">049</span>  private FsDelegationToken fsDelegationToken = new FsDelegationToken(userProvider, "Renewer");<a name="line.49"></a>
+<span class="sourceLineNo">050</span>  private Token hdfsToken = Mockito.mock(Token.class);<a name="line.50"></a>
+<span class="sourceLineNo">051</span>  private Token webhdfsToken = Mockito.mock(Token.class);<a name="line.51"></a>
+<span class="sourceLineNo">052</span>  private Token swebhdfsToken = Mockito.mock(Token.class);<a name="line.52"></a>
+<span class="sourceLineNo">053</span>  private WebHdfsFileSystem webHdfsFileSystem = Mockito.mock(WebHdfsFileSystem.class);<a name="line.53"></a>
+<span class="sourceLineNo">054</span>  private WebHdfsFileSystem swebHdfsFileSystem = Mockito.mock(SWebHdfsFileSystem.class);<a name="line.54"></a>
+<span class="sourceLineNo">055</span>  private FileSystem fileSystem = Mockito.mock(FileSystem.class);<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>  @ClassRule<a name="line.57"></a>
+<span class="sourceLineNo">058</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.58"></a>
+<span class="sourceLineNo">059</span>      HBaseClassTestRule.forClass(TestFsDelegationToken.class);<a name="line.59"></a>
+<span class="sourceLineNo">060</span><a name="line.60"></a>
+<span class="sourceLineNo">061</span>  @Before<a name="line.61"></a>
+<span class="sourceLineNo">062</span>  public void setup() throws IOException, URISyntaxException {<a name="line.62"></a>
+<span class="sourceLineNo">063</span>    when(userProvider.getCurrent()).thenReturn(user);<a name="line.63"></a>
+<span class="sourceLineNo">064</span>    when(userProvider.isHadoopSecurityEnabled()).thenReturn(true);<a name="line.64"></a>
+<span class="sourceLineNo">065</span>    when(fileSystem.getCanonicalServiceName()).thenReturn("hdfs://");<a name="line.65"></a>
+<span class="sourceLineNo">066</span>    when(fileSystem.getUri()).thenReturn(new URI("hdfs://someUri"));<a name="line.66"></a>
+<span class="sourceLineNo">067</span>    when(webHdfsFileSystem.getCanonicalServiceName()).thenReturn("webhdfs://");<a name="line.67"></a>
+<span class="sourceLineNo">068</span>    when(webHdfsFileSystem.getUri()).thenReturn(new URI("webhdfs://someUri"));<a name="line.68"></a>
+<span class="sourceLineNo">069</span>    when(swebHdfsFileSystem.getCanonicalServiceName()).thenReturn("swebhdfs://");<a name="line.69"></a>
+<span class="sourceLineNo">070</span>    when(swebHdfsFileSystem.getUri()).thenReturn(new URI("swebhdfs://someUri"));<a name="line.70"></a>
+<span class="sourceLineNo">071</span>    when(user.getToken(<a name="line.71"></a>
+<span class="sourceLineNo">072</span>        HDFS_DELEGATION_KIND.toString(),<a name="line.72"></a>
+<span class="sourceLineNo">073</span>        fileSystem.getCanonicalServiceName()))<a name="line.73"></a>
+<span class="sourceLineNo">074</span>        .thenReturn(hdfsToken);<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    when(user.getToken(<a name="line.75"></a>
+<span class="sourceLineNo">076</span>        WEBHDFS_TOKEN_KIND.toString(),<a name="line.76"></a>
+<span class="sourceLineNo">077</span>        webHdfsFileSystem.getCanonicalServiceName())).thenReturn(webhdfsToken);<a name="line.77"></a>
+<span class="sourceLineNo">078</span>    when(user.getToken(<a name="line.78"></a>
+<span class="sourceLineNo">079</span>        SWEBHDFS_TOKEN_KIND.toString(),<a name="line.79"></a>
+<span class="sourceLineNo">080</span>        swebHdfsFileSystem.getCanonicalServiceName())).thenReturn(swebhdfsToken);<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    when(hdfsToken.getKind()).thenReturn(new Text("HDFS_DELEGATION_TOKEN"));<a name="line.81"></a>
+<span class="sourceLineNo">082</span>    when(webhdfsToken.getKind()).thenReturn(WEBHDFS_TOKEN_KIND);<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    when(swebhdfsToken.getKind()).thenReturn(SWEBHDFS_TOKEN_KIND);<a name="line.83"></a>
+<span class="sourceLineNo">084</span>  }<a name="line.84"></a>
+<span class="sourceLineNo">085</span><a name="line.85"></a>
+<span class="sourceLineNo">086</span>  @Test<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  public void acquireDelegationToken_defaults_to_hdfsFileSystem() throws IOException {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    fsDelegationToken.acquireDelegationToken(fileSystem);<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    assertEquals(<a name="line.89"></a>
+<span class="sourceLineNo">090</span>        fsDelegationToken.getUserToken().getKind(), HDFS_DELEGATION_KIND);<a name="line.90"></a>
+<span class="sourceLineNo">091</span>  }<a name="line.91"></a>
+<span class="sourceLineNo">092</span><a name="line.92"></a>
+<span class="sourceLineNo">093</span>  @Test<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  public void acquireDelegationToken_webhdfsFileSystem() throws IOException {<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    fsDelegationToken.acquireDelegationToken(webHdfsFileSystem);<a name="line.95"></a>
+<span class="sourceLineNo">096</span>    assertEquals(<a name="line.96"></a>
+<span class="sourceLineNo">097</span>        fsDelegationToken.getUserToken().getKind(), WEBHDFS_TOKEN_KIND);<a name="line.97"></a>
+<span class="sourceLineNo">098</span>  }<a name="line.98"></a>
+<span class="sourceLineNo">099</span><a name="line.99"></a>
+<span class="sourceLineNo">100</span>  @Test<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  public void acquireDelegationToken_swebhdfsFileSystem() throws IOException {<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    fsDelegationToken.acquireDelegationToken(swebHdfsFileSystem);<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    assertEquals(<a name="line.103"></a>
+<span class="sourceLineNo">104</span>        fsDelegationToken.getUserToken().getKind(), SWEBHDFS_TOKEN_KIND);<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  @Test(expected = NullPointerException.class)<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  public void acquireDelegationTokenByTokenKind_rejects_null_token_kind() throws IOException {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    fsDelegationToken.acquireDelegationToken(null, fileSystem);<a name="line.109"></a>
 <span class="sourceLineNo">110</span>  }<a name="line.110"></a>
 <span class="sourceLineNo">111</span><a name="line.111"></a>
 <span class="sourceLineNo">112</span>  @Test<a name="line.112"></a>
-<span class="sourceLineNo">113</span>  public void acquireDelegationToken_swebhdfsFileSystem() throws IOException {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    fsDelegationToken.acquireDelegationToken(swebHdfsFileSystem);<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    assertEquals(<a name="line.115"></a>
-<span class="sourceLineNo">116</span>        fsDelegationToken.getUserToken().getKind(), SWEBHDFS_TOKEN_KIND);<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    assertNotNull(<a name="line.117"></a>
-<span class="sourceLineNo">118</span>      "Swebhdfs token should exist in cache after acquired",<a name="line.118"></a>
-<span class="sourceLineNo">119</span>      userProvider.getCurrent()<a name="line.119"></a>
-<span class="sourceLineNo">120</span>        .getToken(SWEBHDFS_TOKEN_KIND.toString(), swebHdfsFileSystem.getCanonicalServiceName()));<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  }<a name="line.121"></a>
-<span class="sourceLineNo">122</span><a name="line.122"></a>
-<span class="sourceLineNo">123</span>  @Test(expected = NullPointerException.class)<a name="line.123"></a>
-<span class="sourceLineNo">124</span>  public void acquireDelegationTokenByTokenKind_rejects_null_token_kind() throws IOException {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    fsDelegationToken.acquireDelegationToken(null, fileSystem);<a name="line.125"></a>
-<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
-<span class="sourceLineNo">127</span><a name="line.127"></a>
-<span class="sourceLineNo">128</span>  @Test<a name="line.128"></a>
-<span class="sourceLineNo">129</span>  public void acquireDelegationTokenByTokenKind_webhdfsFileSystem() throws IOException {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    fsDelegationToken<a name="line.130"></a>
-<span class="sourceLineNo">131</span>        .acquireDelegationToken(WEBHDFS_TOKEN_KIND.toString(), webHdfsFileSystem);<a name="line.131"></a>
-<span class="sourceLineNo">132</span>    assertEquals(fsDelegationToken.getUserToken().getKind(), WEBHDFS_TOKEN_KIND);<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    assertNotNull(<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      "Webhdfs token should exist in cache after acquired",<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      userProvider.getCurrent()<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        .getToken(WEBHDFS_TOKEN_KIND.toString(), webHdfsFileSystem.getCanonicalServiceName()));<a name="line.136"></a>
-<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>  @Test<a name="line.139"></a>
-<span class="sourceLineNo">140</span>  public void acquireDelegationTokenByTokenKind_swebhdfsFileSystem() throws IOException {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    fsDelegationToken<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        .acquireDelegationToken(<a name="line.142"></a>
-<span class="sourceLineNo">143</span>            SWEBHDFS_TOKEN_KIND.toString(), swebHdfsFileSystem);<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    assertEquals(fsDelegationToken.getUserToken().getKind(), SWEBHDFS_TOKEN_KIND);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    assertNotNull(<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      "Swebhdfs token should exist in cache after acquired",<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      userProvider.getCurrent()<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        .getToken(SWEBHDFS_TOKEN_KIND.toString(), swebHdfsFileSystem.getCanonicalServiceName()));<a name="line.148"></a>
-<span class="sourceLineNo">149</span>  }<a name="line.149"></a>
-<span class="sourceLineNo">150</span>}<a name="line.150"></a>
+<span class="sourceLineNo">113</span>  public void acquireDelegationTokenByTokenKind_webhdfsFileSystem() throws IOException {<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    fsDelegationToken<a name="line.114"></a>
+<span class="sourceLineNo">115</span>        .acquireDelegationToken(WEBHDFS_TOKEN_KIND.toString(), webHdfsFileSystem);<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    assertEquals(fsDelegationToken.getUserToken().getKind(), WEBHDFS_TOKEN_KIND);<a name="line.116"></a>
+<span class="sourceLineNo">117</span>  }<a name="line.117"></a>
+<span class="sourceLineNo">118</span><a name="line.118"></a>
+<span class="sourceLineNo">119</span>  @Test<a name="line.119"></a>
+<span class="sourceLineNo">120</span>  public void acquireDelegationTokenByTokenKind_swebhdfsFileSystem() throws IOException {<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    fsDelegationToken<a name="line.121"></a>
+<span class="sourceLineNo">122</span>        .acquireDelegationToken(<a name="line.122"></a>
+<span class="sourceLineNo">123</span>            SWEBHDFS_TOKEN_KIND.toString(), swebHdfsFileSystem);<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    assertEquals(fsDelegationToken.getUserToken().getKind(), SWEBHDFS_TOKEN_KIND);<a name="line.124"></a>
+<span class="sourceLineNo">125</span>  }<a name="line.125"></a>
+<span class="sourceLineNo">126</span>}<a name="line.126"></a>